Apply mix formatter

Marcel Otto 2020-06-29 10:37:42 +02:00
parent c880026224
commit 0e81f4c02c
129 changed files with 7078 additions and 5763 deletions

View File

@@ -1,5 +1,7 @@
locals_without_parens = [
  defvocab: 2,
  def_facet_constraint: 2,
  def_applicable_facet: 1,
  bgp: 1
]
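
For context, `locals_without_parens` tells `mix format` which locally defined macros may keep their paren-free keyword style at the listed arities. A minimal sketch of the effect (the `SchemaOrg` vocabulary name is made up for illustration):

# With defvocab: 2 listed in locals_without_parens, the formatter keeps this form:
defvocab SchemaOrg,
  base_iri: "http://schema.org/",
  terms: [],
  strict: false

# Without the entry it would be rewritten to defvocab(SchemaOrg, base_iri: ..., ...).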

View File

@@ -2,46 +2,63 @@ defmodule NS do
  use RDF.Vocabulary.Namespace

  defvocab MF,
    base_iri: "http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#",
    terms: [],
    strict: false

  defvocab RDFT, base_iri: "http://www.w3.org/ns/rdftest#", terms: [], strict: false
end

alias NS.{MF, RDFT}
alias RDF.NS.RDFS
alias RDF.Query.BGP

test_graph =
  RDF.Turtle.read_file!("test/data/TURTLE-TESTS/manifest.ttl",
    base: "http://www.w3.org/2013/TurtleTests/"
  )

all_query = %BGP{triple_patterns: [{:s, :p, :o}]}

Benchee.run(%{
  "take 1 from BGP.Simple" => fn -> BGP.Simple.stream(all_query, test_graph) |> Enum.take(1) end,
  "take 1 from BGP.Stream" => fn -> BGP.Stream.stream(all_query, test_graph) |> Enum.take(1) end
})

# rdft:approval rdft:Approved - count: 287
approved_query = %BGP{
  triple_patterns: [
    {:test_case, RDFT.approval(), RDF.iri(RDFT.Approved)},
    {:test_case, MF.name(), :name},
    {:test_case, RDFS.comment(), :comment}
  ]
}

# rdft:approval rdft:Proposed - count: 4
proposed_query = %BGP{
  triple_patterns: [
    {:test_case, RDFT.approval(), RDF.iri(RDFT.Proposed)},
    {:test_case, MF.name(), :name},
    {:test_case, RDFS.comment(), :comment}
  ]
}

Benchee.run(%{
  "APPROVED from BGP.Simple" => fn -> BGP.Simple.execute(approved_query, test_graph) end,
  "PROPOSED from BGP.Simple" => fn -> BGP.Simple.execute(proposed_query, test_graph) end,
  "APPROVED from BGP.Stream (consumed)" => fn ->
    BGP.Stream.execute(approved_query, test_graph)
  end,
  "PROPOSED from BGP.Stream (consumed)" => fn ->
    BGP.Stream.execute(proposed_query, test_graph)
  end,
  "APPROVED from BGP.Stream (unconsumed)" => fn ->
    BGP.Stream.stream(approved_query, test_graph)
  end,
  "PROPOSED from BGP.Stream (unconsumed)" => fn ->
    BGP.Stream.stream(proposed_query, test_graph)
  end,
  "APPROVED from BGP.Stream (1 consumed)" => fn ->
    BGP.Stream.stream(approved_query, test_graph) |> Enum.take(1)
  end
})

View File

@@ -40,18 +40,27 @@ defmodule RDF do
  For a general introduction you may refer to the guides on the [homepage](https://rdf-elixir.dev).
  """

  alias RDF.{
    IRI,
    Namespace,
    Literal,
    BlankNode,
    Triple,
    Quad,
    Description,
    Graph,
    Dataset,
    PrefixMap
  }

  import RDF.Guards
  import RDF.Utils.Bootstrapping

  defdelegate default_base_iri(), to: RDF.IRI, as: :default_base

  @standard_prefixes PrefixMap.new(
                       xsd: xsd_iri_base(),
                       rdf: rdf_iri_base(),
                       rdfs: rdfs_iri_base()
                     )
@@ -67,7 +76,6 @@ defmodule RDF do
  """
  def standard_prefixes(), do: @standard_prefixes

  @doc """
  A user-defined `RDF.PrefixMap` of prefixes to IRI namespaces.
@@ -114,16 +122,15 @@ defmodule RDF do
    default_prefixes() |> PrefixMap.merge!(prefix_mappings)
  end

  defdelegate read_string(content, opts), to: RDF.Serialization
  defdelegate read_string!(content, opts), to: RDF.Serialization
  defdelegate read_file(filename, opts \\ []), to: RDF.Serialization
  defdelegate read_file!(filename, opts \\ []), to: RDF.Serialization
  defdelegate write_string(content, opts), to: RDF.Serialization
  defdelegate write_string!(content, opts), to: RDF.Serialization
  defdelegate write_file(content, filename, opts \\ []), to: RDF.Serialization
  defdelegate write_file!(content, filename, opts \\ []), to: RDF.Serialization

  @doc """
  Checks if the given value is a RDF resource.
@@ -149,6 +156,7 @@ defmodule RDF do
  def resource?(value)
  def resource?(%IRI{}), do: true
  def resource?(%BlankNode{}), do: true

  def resource?(qname) when maybe_ns_term(qname) do
    case Namespace.resolve_term(qname) do
      {:ok, iri} -> resource?(iri)
@@ -182,12 +190,12 @@ defmodule RDF do
  """
  def term?(value)
  def term?(%Literal{}), do: true
  def term?(value), do: resource?(value)

  defdelegate uri?(value), to: IRI, as: :valid?
  defdelegate iri?(value), to: IRI, as: :valid?
  defdelegate uri(value), to: IRI, as: :new
  defdelegate iri(value), to: IRI, as: :new
  defdelegate uri!(value), to: IRI, as: :new!
  defdelegate iri!(value), to: IRI, as: :new!
@@ -206,7 +214,7 @@ defmodule RDF do
  def bnode?(%BlankNode{}), do: true
  def bnode?(_), do: false

  defdelegate bnode(), to: BlankNode, as: :new
  defdelegate bnode(id), to: BlankNode, as: :new

  @doc """
@@ -215,59 +223,59 @@ defmodule RDF do
  def literal?(%Literal{}), do: true
  def literal?(_), do: false

  defdelegate literal(value), to: Literal, as: :new
  defdelegate literal(value, opts), to: Literal, as: :new

  defdelegate triple(s, p, o), to: Triple, as: :new
  defdelegate triple(tuple), to: Triple, as: :new

  defdelegate quad(s, p, o, g), to: Quad, as: :new
  defdelegate quad(tuple), to: Quad, as: :new

  defdelegate description(arg), to: Description, as: :new
  defdelegate description(arg1, arg2), to: Description, as: :new
  defdelegate description(arg1, arg2, arg3), to: Description, as: :new

  defdelegate graph(), to: Graph, as: :new
  defdelegate graph(arg), to: Graph, as: :new
  defdelegate graph(arg1, arg2), to: Graph, as: :new
  defdelegate graph(arg1, arg2, arg3), to: Graph, as: :new
  defdelegate graph(arg1, arg2, arg3, arg4), to: Graph, as: :new

  defdelegate dataset(), to: Dataset, as: :new
  defdelegate dataset(arg), to: Dataset, as: :new
  defdelegate dataset(arg1, arg2), to: Dataset, as: :new

  defdelegate diff(arg1, arg2), to: RDF.Diff

  defdelegate list?(resource, graph), to: RDF.List, as: :node?
  defdelegate list?(description), to: RDF.List, as: :node?

  def list(native_list), do: RDF.List.from(native_list)
  def list(head, %Graph{} = graph), do: RDF.List.new(head, graph)
  def list(native_list, opts), do: RDF.List.from(native_list, opts)

  defdelegate prefix_map(prefixes), to: RDF.PrefixMap, as: :new

  defdelegate langString(value, opts), to: RDF.LangString, as: :new
  defdelegate lang_string(value, opts), to: RDF.LangString, as: :new

  for term <- ~w[type subject predicate object first rest value]a do
    defdelegate unquote(term)(), to: RDF.NS.RDF
    @doc false
    defdelegate unquote(term)(s, o), to: RDF.NS.RDF
    @doc false
    defdelegate unquote(term)(s, o1, o2), to: RDF.NS.RDF
    @doc false
    defdelegate unquote(term)(s, o1, o2, o3), to: RDF.NS.RDF
    @doc false
    defdelegate unquote(term)(s, o1, o2, o3, o4), to: RDF.NS.RDF
    @doc false
    defdelegate unquote(term)(s, o1, o2, o3, o4, o5), to: RDF.NS.RDF
  end

  defdelegate langString(), to: RDF.NS.RDF
  defdelegate lang_string(), to: RDF.NS.RDF, as: :langString
  defdelegate unquote(nil)(), to: RDF.NS.RDF

  defdelegate __base_iri__(), to: RDF.NS.RDF
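
The delegates above expose the most common constructors directly on the `RDF` module. A rough usage sketch, assuming the delegated functions behave like their targets (the example IRIs are made up):

# RDF.iri/1, RDF.bnode/1, RDF.literal/2 and RDF.triple/3 delegate to
# IRI.new/1, BlankNode.new/1, Literal.new/2 and Triple.new/3 respectively.
iri = RDF.iri("http://example.com/Resource")
bnode = RDF.bnode(:b1)
literal = RDF.literal("Hello", language: "en")
RDF.triple(iri, RDF.type(), RDF.iri("http://example.com/Class"))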

View File

@@ -7,8 +7,8 @@ defmodule RDF.BlankNode do
  """

  @type t :: %__MODULE__{
          id: String.t()
        }

  @enforce_keys [:id]
  defstruct [:id]
@@ -28,19 +28,18 @@ defmodule RDF.BlankNode do
      iex> RDF.bnode(:foo)
      %RDF.BlankNode{id: "foo"}
  """
  @spec new(reference | String.t() | atom | integer) :: t
  def new(id)

  def new(id) when is_binary(id),
    do: %__MODULE__{id: id}

  def new(id) when is_reference(id),
    do: id |> :erlang.ref_to_list() |> to_string |> String.replace(~r/\<|\>/, "") |> new

  def new(id) when is_atom(id) or is_integer(id),
    do: id |> to_string |> new

  @doc """
  Tests for value equality of blank nodes.
@@ -55,9 +54,7 @@ defmodule RDF.BlankNode do
  def equal_value?(_, _),
    do: nil

  defimpl String.Chars do
    def to_string(%RDF.BlankNode{id: id}), do: "_:#{id}"
  end
end
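
A short sketch of the constructor and the `String.Chars` implementation above, following the doctest shown in the hunk:

RDF.BlankNode.new(:foo)
# => %RDF.BlankNode{id: "foo"}

RDF.BlankNode.new("bar") |> to_string()
# => "_:bar"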

View File

@@ -5,7 +5,6 @@ defmodule RDF.BlankNode.Generator do
  use GenServer

  # Client API ###############################################################

  @doc """
@@ -30,7 +29,6 @@ defmodule RDF.BlankNode.Generator do
  defp convert_opts(opts) when is_list(opts), do: Map.new(opts)
  defp convert_opts(opts) when is_map(opts), do: opts

  @doc """
  Synchronously stops the blank node generator with the given `reason`.
@@ -45,7 +43,6 @@ defmodule RDF.BlankNode.Generator do
    GenServer.stop(pid, reason, timeout)
  end

  @doc """
  Generates a new blank node according to the `RDF.BlankNode.Generator.Algorithm` set up.
  """
@@ -53,7 +50,6 @@ defmodule RDF.BlankNode.Generator do
    GenServer.call(pid, :generate)
  end

  @doc """
  Generates a blank node for a given value according to the `RDF.BlankNode.Generator.Algorithm` set up.
  """
@@ -61,7 +57,6 @@ defmodule RDF.BlankNode.Generator do
    GenServer.call(pid, {:generate_for, value})
  end

  # Server Callbacks #########################################################

  @impl GenServer
@@ -69,7 +64,6 @@ defmodule RDF.BlankNode.Generator do
    {:ok, {generation_mod, generation_mod.init(init_opts)}}
  end

  @impl GenServer
  def handle_call(:generate, _from, {generation_mod, state}) do
    with {bnode, new_state} = generation_mod.generate(state) do
@@ -83,5 +77,4 @@ defmodule RDF.BlankNode.Generator do
      {:reply, bnode, {generation_mod, new_state}}
    end
  end
end

View File

@@ -16,7 +16,7 @@ defmodule RDF.BlankNode.Generator.Algorithm do
  An implementation should compute a blank node from the given state and return
  a tuple consisting of the generated blank node and the new state.
  """
  @callback generate(state :: map) :: {RDF.BlankNode.t(), map}

  @doc """
  Generates a blank node for a given string.
@@ -27,6 +27,5 @@ defmodule RDF.BlankNode.Generator.Algorithm do
  given state and return a tuple consisting of the generated blank node and the
  new state.
  """
  @callback generate_for(value :: any, state :: map) :: {RDF.BlankNode.t(), map}
end

View File

@@ -40,7 +40,8 @@ defmodule RDF.BlankNode.Increment do
    case Map.get(map, value) do
      nil ->
        {bnode(counter, state),
         %{state | map: Map.put(map, value, counter), counter: counter + 1}}

      previous ->
        {bnode(previous, state), state}
    end
@@ -53,5 +54,4 @@ defmodule RDF.BlankNode.Increment do
  defp bnode(counter, _) do
    BlankNode.new(counter)
  end
end
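
A hedged sketch of how the increment algorithm threads its state: the `map` and `counter` field names come from the clause above, but the bare initial state shape is an assumption (normally the state comes from the algorithm's `init/1`).

# generate_for/2 hands out a fresh counter-based blank node per new value and
# reuses the recorded counter for values it has already seen (assumed state shape).
state = %{map: %{}, counter: 0}
{bnode_a, state} = RDF.BlankNode.Increment.generate_for("alice", state)
{_bnode_b, state} = RDF.BlankNode.Increment.generate_for("bob", state)
{^bnode_a, _state} = RDF.BlankNode.Increment.generate_for("alice", state)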

View File

@@ -137,10 +137,12 @@ defimpl RDF.Data, for: RDF.Description do
  def merge(description, {_, _, _, _} = quad),
    do: RDF.Dataset.new(description) |> RDF.Dataset.add(quad)

  def merge(
        %RDF.Description{subject: subject} = description,
        %RDF.Description{subject: other_subject} = other_description
      )
      when other_subject == subject,
      do: RDF.Description.add(description, other_description)

  def merge(description, %RDF.Description{} = other_description),
    do: RDF.Graph.new(description) |> RDF.Graph.add(other_description)
@@ -151,14 +153,16 @@ defimpl RDF.Data, for: RDF.Description do
  def merge(description, %RDF.Dataset{} = dataset),
    do: RDF.Data.merge(dataset, description)

  def delete(
        %RDF.Description{subject: subject} = description,
        %RDF.Description{subject: other_subject}
      )
      when subject != other_subject,
      do: description

  def delete(description, statements), do: RDF.Description.delete(description, statements)

  def pop(description), do: RDF.Description.pop(description)

  def include?(description, statements),
    do: RDF.Description.include?(description, statements)
@@ -180,14 +184,14 @@ defimpl RDF.Data, for: RDF.Description do
  def subjects(%RDF.Description{subject: subject}), do: MapSet.new([subject])
  def predicates(description), do: RDF.Description.predicates(description)
  def objects(description), do: RDF.Description.objects(description)

  def resources(%RDF.Description{subject: subject} = description),
    do: RDF.Description.resources(description) |> MapSet.put(subject)

  def subject_count(_), do: 1
  def statement_count(description), do: RDF.Description.count(description)
  def values(description), do: RDF.Description.values(description)
  def values(description, mapping), do: RDF.Description.values(description, mapping)

  def equal?(description, %RDF.Description{} = other_description) do
@@ -209,7 +213,6 @@ defimpl RDF.Data, for: RDF.Description do
  def equal?(_, _), do: false
end

defimpl RDF.Data, for: RDF.Graph do
  def merge(%RDF.Graph{name: name} = graph, {_, _, _, graph_context} = quad) do
    with ^name <- RDF.Statement.coerce_graph_name(graph_context) do
@@ -230,10 +233,12 @@ defimpl RDF.Data, for: RDF.Graph do
  def merge(graph, %RDF.Description{} = description),
    do: RDF.Graph.add(graph, description)

  def merge(
        %RDF.Graph{name: name} = graph,
        %RDF.Graph{name: other_name} = other_graph
      )
      when other_name == name,
      do: RDF.Graph.add(graph, other_graph)

  def merge(graph, %RDF.Graph{} = other_graph),
    do: RDF.Dataset.new(graph) |> RDF.Dataset.add(other_graph)
@@ -241,12 +246,13 @@ defimpl RDF.Data, for: RDF.Graph do
  def merge(graph, %RDF.Dataset{} = dataset),
    do: RDF.Data.merge(dataset, graph)

  def delete(%RDF.Graph{name: name} = graph, %RDF.Graph{name: other_name})
      when name != other_name,
      do: graph

  def delete(graph, statements), do: RDF.Graph.delete(graph, statements)

  def pop(graph), do: RDF.Graph.pop(graph)

  def include?(graph, statements), do: RDF.Graph.include?(graph, statements)
@@ -260,22 +266,25 @@ defimpl RDF.Data, for: RDF.Graph do
  def statements(graph), do: RDF.Graph.statements(graph)
  def subjects(graph), do: RDF.Graph.subjects(graph)
  def predicates(graph), do: RDF.Graph.predicates(graph)
  def objects(graph), do: RDF.Graph.objects(graph)
  def resources(graph), do: RDF.Graph.resources(graph)
  def subject_count(graph), do: RDF.Graph.subject_count(graph)
  def statement_count(graph), do: RDF.Graph.triple_count(graph)
  def values(graph), do: RDF.Graph.values(graph)
  def values(graph, mapping), do: RDF.Graph.values(graph, mapping)

  def equal?(graph, %RDF.Description{} = description),
    do: RDF.Data.equal?(description, graph)

  def equal?(graph, %RDF.Graph{} = other_graph),
    do:
      RDF.Graph.equal?(
        %RDF.Graph{graph | name: nil},
        %RDF.Graph{other_graph | name: nil}
      )

  def equal?(graph, %RDF.Dataset{} = dataset),
    do: RDF.Data.equal?(dataset, graph)
@@ -283,25 +292,29 @@ defimpl RDF.Data, for: RDF.Graph do
  def equal?(_, _), do: false
end

defimpl RDF.Data, for: RDF.Dataset do
  def merge(dataset, {_, _, _} = triple),
    do: RDF.Dataset.add(dataset, triple)

  def merge(dataset, {_, _, _, _} = quad),
    do: RDF.Dataset.add(dataset, quad)

  def merge(dataset, %RDF.Description{} = description),
    do: RDF.Dataset.add(dataset, description)

  def merge(dataset, %RDF.Graph{} = graph),
    do: RDF.Dataset.add(dataset, graph)

  def merge(dataset, %RDF.Dataset{} = other_dataset),
    do: RDF.Dataset.add(dataset, other_dataset)

  def delete(%RDF.Dataset{name: name} = dataset, %RDF.Dataset{name: other_name})
      when name != other_name,
      do: dataset

  def delete(dataset, statements), do: RDF.Dataset.delete(dataset, statements)

  def pop(dataset), do: RDF.Dataset.pop(dataset)

  def include?(dataset, statements), do: RDF.Dataset.include?(dataset, statements)
@@ -310,31 +323,32 @@ defimpl RDF.Data, for: RDF.Dataset do
  def description(dataset, subject) do
    with subject = RDF.Statement.coerce_subject(subject) do
      Enum.reduce(RDF.Dataset.graphs(dataset), RDF.Description.new(subject), fn
        %RDF.Graph{descriptions: %{^subject => graph_description}}, description ->
          RDF.Description.add(description, graph_description)

        _, description ->
          description
      end)
    end
  end

  def descriptions(dataset) do
    dataset
    |> subjects
    |> Enum.map(&description(dataset, &1))
  end

  def statements(dataset), do: RDF.Dataset.statements(dataset)
  def subjects(dataset), do: RDF.Dataset.subjects(dataset)
  def predicates(dataset), do: RDF.Dataset.predicates(dataset)
  def objects(dataset), do: RDF.Dataset.objects(dataset)
  def resources(dataset), do: RDF.Dataset.resources(dataset)
  def subject_count(dataset), do: dataset |> subjects |> Enum.count()
  def statement_count(dataset), do: RDF.Dataset.statement_count(dataset)
  def values(dataset), do: RDF.Dataset.values(dataset)
  def values(dataset, mapping), do: RDF.Dataset.values(dataset, mapping)

  def equal?(dataset, %RDF.Description{} = description) do
@@ -354,8 +368,10 @@ defimpl RDF.Data, for: RDF.Dataset do
  end

  def equal?(dataset, %RDF.Dataset{} = other_dataset) do
    RDF.Dataset.equal?(
      %RDF.Dataset{dataset | name: nil},
      %RDF.Dataset{other_dataset | name: nil}
    )
  end

  def equal?(_, _), do: false
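
A hedged sketch of the `RDF.Data.merge/2` dispatch for descriptions shown above (the example IRIs are made up): merging two descriptions of the same subject stays a description, while different subjects are promoted to a graph.

s = RDF.iri("http://example.com/S")
p = RDF.iri("http://example.com/p")

d1 = RDF.Description.new({s, p, RDF.literal(1)})
d2 = RDF.Description.new({s, p, RDF.literal(2)})
d3 = RDF.Description.new({RDF.iri("http://example.com/T"), p, RDF.literal(3)})

%RDF.Description{} = RDF.Data.merge(d1, d2)  # same subject: RDF.Description.add/2
%RDF.Graph{} = RDF.Data.merge(d1, d3)        # different subjects: wrapped in an RDF.Graph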

View File

@ -18,20 +18,19 @@ defmodule RDF.Dataset do
alias RDF.{Description, Graph, IRI, Statement} alias RDF.{Description, Graph, IRI, Statement}
import RDF.Statement import RDF.Statement
@type graph_name :: IRI.t | nil @type graph_name :: IRI.t() | nil
@type t :: %__MODULE__{ @type t :: %__MODULE__{
name: graph_name, name: graph_name,
graphs: %{graph_name => Graph.t} graphs: %{graph_name => Graph.t()}
} }
@type input :: Graph.input | t @type input :: Graph.input() | t
@type update_graph_fun :: (Graph.t -> {Graph.t, input} | :pop) @type update_graph_fun :: (Graph.t() -> {Graph.t(), input} | :pop)
defstruct name: nil, graphs: %{} defstruct name: nil, graphs: %{}
@doc """ @doc """
Creates an empty unnamed `RDF.Dataset`. Creates an empty unnamed `RDF.Dataset`.
""" """
@ -96,7 +95,6 @@ defmodule RDF.Dataset do
|> add(data) |> add(data)
end end
@doc """ @doc """
Adds triples and quads to a `RDF.Dataset`. Adds triples and quads to a `RDF.Dataset`.
@ -109,9 +107,9 @@ defmodule RDF.Dataset do
def add(dataset, statements, graph_context) when is_list(statements) do def add(dataset, statements, graph_context) when is_list(statements) do
with graph_context = graph_context && coerce_graph_name(graph_context) do with graph_context = graph_context && coerce_graph_name(graph_context) do
Enum.reduce statements, dataset, fn (statement, dataset) -> Enum.reduce(statements, dataset, fn statement, dataset ->
add(dataset, statement, graph_context) add(dataset, statement, graph_context)
end end)
end end
end end
@ -121,13 +119,20 @@ defmodule RDF.Dataset do
def add(dataset, {subject, predicate, objects}, graph_context), def add(dataset, {subject, predicate, objects}, graph_context),
do: add(dataset, {subject, predicate, objects, graph_context}) do: add(dataset, {subject, predicate, objects, graph_context})
def add(%__MODULE__{name: name, graphs: graphs}, def add(
{subject, predicate, objects, graph_context}, false) do %__MODULE__{name: name, graphs: graphs},
{subject, predicate, objects, graph_context},
false
) do
with graph_context = coerce_graph_name(graph_context) do with graph_context = coerce_graph_name(graph_context) do
updated_graphs = updated_graphs =
Map.update(graphs, graph_context, Map.update(
graphs,
graph_context,
Graph.new({subject, predicate, objects}, name: graph_context), Graph.new({subject, predicate, objects}, name: graph_context),
fn graph -> Graph.add(graph, {subject, predicate, objects}) end) fn graph -> Graph.add(graph, {subject, predicate, objects}) end
)
%__MODULE__{name: name, graphs: updated_graphs} %__MODULE__{name: name, graphs: updated_graphs}
end end
end end
@ -138,21 +143,22 @@ defmodule RDF.Dataset do
def add(%__MODULE__{} = dataset, %Description{} = description, false), def add(%__MODULE__{} = dataset, %Description{} = description, false),
do: add(dataset, description, nil) do: add(dataset, description, nil)
def add(%__MODULE__{name: name, graphs: graphs}, def add(%__MODULE__{name: name, graphs: graphs}, %Description{} = description, graph_context) do
%Description{} = description, graph_context) do
with graph_context = coerce_graph_name(graph_context) do with graph_context = coerce_graph_name(graph_context) do
updated_graph = updated_graph =
Map.get(graphs, graph_context, Graph.new(name: graph_context)) Map.get(graphs, graph_context, Graph.new(name: graph_context))
|> Graph.add(description) |> Graph.add(description)
%__MODULE__{ %__MODULE__{
name: name, name: name,
graphs: Map.put(graphs, graph_context, updated_graph) graphs: Map.put(graphs, graph_context, updated_graph)
} }
end end
end end
def add(%__MODULE__{name: name, graphs: graphs}, %Graph{} = graph, false) do def add(%__MODULE__{name: name, graphs: graphs}, %Graph{} = graph, false) do
%__MODULE__{name: name, %__MODULE__{
name: name,
graphs: graphs:
Map.update(graphs, graph.name, graph, fn current -> Map.update(graphs, graph.name, graph, fn current ->
Graph.add(current, graph) Graph.add(current, graph)
@ -165,13 +171,12 @@ defmodule RDF.Dataset do
def add(%__MODULE__{} = dataset, %__MODULE__{} = other_dataset, graph_context) do def add(%__MODULE__{} = dataset, %__MODULE__{} = other_dataset, graph_context) do
with graph_context = graph_context && coerce_graph_name(graph_context) do with graph_context = graph_context && coerce_graph_name(graph_context) do
Enum.reduce graphs(other_dataset), dataset, fn (graph, dataset) -> Enum.reduce(graphs(other_dataset), dataset, fn graph, dataset ->
add(dataset, graph, graph_context) add(dataset, graph, graph_context)
end end)
end end
end end
@doc """ @doc """
Adds statements to a `RDF.Dataset` and overwrites all existing statements with the same subjects and predicates in the specified graph context. Adds statements to a `RDF.Dataset` and overwrites all existing statements with the same subjects and predicates in the specified graph context.
@ -186,7 +191,7 @@ defmodule RDF.Dataset do
...> RDF.Dataset.put([{EX.S1, EX.P2, EX.O3}, {EX.S2, EX.P2, EX.O3}]) ...> RDF.Dataset.put([{EX.S1, EX.P2, EX.O3}, {EX.S2, EX.P2, EX.O3}])
RDF.Dataset.new([{EX.S1, EX.P1, EX.O1}, {EX.S1, EX.P2, EX.O3}, {EX.S2, EX.P2, EX.O3}]) RDF.Dataset.new([{EX.S1, EX.P1, EX.O1}, {EX.S1, EX.P2, EX.O3}, {EX.S2, EX.P2, EX.O3}])
""" """
@spec put(t, input | [input], Statement.coercible_graph_name | boolean | nil) :: t @spec put(t, input | [input], Statement.coercible_graph_name() | boolean | nil) :: t
def put(dataset, statements, graph_context \\ false) def put(dataset, statements, graph_context \\ false)
def put(%__MODULE__{} = dataset, {subject, predicate, objects}, false), def put(%__MODULE__{} = dataset, {subject, predicate, objects}, false),
@ -195,18 +200,22 @@ defmodule RDF.Dataset do
def put(%__MODULE__{} = dataset, {subject, predicate, objects}, graph_context), def put(%__MODULE__{} = dataset, {subject, predicate, objects}, graph_context),
do: put(dataset, {subject, predicate, objects, graph_context}) do: put(dataset, {subject, predicate, objects, graph_context})
def put(%__MODULE__{name: name, graphs: graphs}, def put(
{subject, predicate, objects, graph_context}, false) do %__MODULE__{name: name, graphs: graphs},
{subject, predicate, objects, graph_context},
false
) do
with graph_context = coerce_graph_name(graph_context) do with graph_context = coerce_graph_name(graph_context) do
new_graph = new_graph =
case graphs[graph_context] do case graphs[graph_context] do
graph = %Graph{} -> graph = %Graph{} ->
Graph.put(graph, {subject, predicate, objects}) Graph.put(graph, {subject, predicate, objects})
nil -> nil ->
Graph.new({subject, predicate, objects}, name: graph_context) Graph.new({subject, predicate, objects}, name: graph_context)
end end
%__MODULE__{name: name,
graphs: Map.put(graphs, graph_context, new_graph)} %__MODULE__{name: name, graphs: Map.put(graphs, graph_context, new_graph)}
end end
end end
@ -214,50 +223,61 @@ defmodule RDF.Dataset do
do: put(dataset, {subject, predicate, objects, graph_context}, false) do: put(dataset, {subject, predicate, objects, graph_context}, false)
def put(%__MODULE__{} = dataset, statements, false) when is_list(statements) do def put(%__MODULE__{} = dataset, statements, false) when is_list(statements) do
do_put dataset, Enum.group_by(statements, do_put(
dataset,
Enum.group_by(
statements,
fn fn
{s, _, _} -> {s, nil} {s, _, _} -> {s, nil}
{s, _, _, nil} -> {s, nil} {s, _, _, nil} -> {s, nil}
{s, _, _, c} -> {s, coerce_graph_name(c)} {s, _, _, c} -> {s, coerce_graph_name(c)}
end, end,
fn fn
{_, p, o, _} -> {p, o} {_, p, o, _} -> {p, o}
{_, p, o} -> {p, o} {_, p, o} -> {p, o}
end) end
)
)
end end
def put(%__MODULE__{} = dataset, statements, graph_context) when is_list(statements) do def put(%__MODULE__{} = dataset, statements, graph_context) when is_list(statements) do
with graph_context = coerce_graph_name(graph_context) do with graph_context = coerce_graph_name(graph_context) do
do_put dataset, Enum.group_by(statements, do_put(
dataset,
Enum.group_by(
statements,
fn fn
{s, _, _, _} -> {s, graph_context} {s, _, _, _} -> {s, graph_context}
{s, _, _} -> {s, graph_context} {s, _, _} -> {s, graph_context}
end, end,
fn fn
{_, p, o, _} -> {p, o} {_, p, o, _} -> {p, o}
{_, p, o} -> {p, o} {_, p, o} -> {p, o}
end) end
)
)
end end
end end
def put(%__MODULE__{} = dataset, %Description{} = description, false), def put(%__MODULE__{} = dataset, %Description{} = description, false),
do: put(dataset, description, nil) do: put(dataset, description, nil)
def put(%__MODULE__{name: name, graphs: graphs}, def put(%__MODULE__{name: name, graphs: graphs}, %Description{} = description, graph_context) do
%Description{} = description, graph_context) do
with graph_context = coerce_graph_name(graph_context) do with graph_context = coerce_graph_name(graph_context) do
updated_graph = updated_graph =
Map.get(graphs, graph_context, Graph.new(name: graph_context)) Map.get(graphs, graph_context, Graph.new(name: graph_context))
|> Graph.put(description) |> Graph.put(description)
%__MODULE__{ %__MODULE__{
name: name, name: name,
graphs: Map.put(graphs, graph_context, updated_graph) graphs: Map.put(graphs, graph_context, updated_graph)
} }
end end
end end
def put(%__MODULE__{name: name, graphs: graphs}, %Graph{} = graph, false) do def put(%__MODULE__{name: name, graphs: graphs}, %Graph{} = graph, false) do
%__MODULE__{name: name, %__MODULE__{
name: name,
graphs: graphs:
Map.update(graphs, graph.name, graph, fn current -> Map.update(graphs, graph.name, graph, fn current ->
Graph.put(current, graph) Graph.put(current, graph)
@ -270,31 +290,31 @@ defmodule RDF.Dataset do
def put(%__MODULE__{} = dataset, %__MODULE__{} = other_dataset, graph_context) do def put(%__MODULE__{} = dataset, %__MODULE__{} = other_dataset, graph_context) do
with graph_context = graph_context && coerce_graph_name(graph_context) do with graph_context = graph_context && coerce_graph_name(graph_context) do
Enum.reduce graphs(other_dataset), dataset, fn (graph, dataset) -> Enum.reduce(graphs(other_dataset), dataset, fn graph, dataset ->
put(dataset, graph, graph_context) put(dataset, graph, graph_context)
end end)
end end
end end
defp do_put(%__MODULE__{} = dataset, statements) when is_map(statements) do defp do_put(%__MODULE__{} = dataset, statements) when is_map(statements) do
Enum.reduce statements, dataset, Enum.reduce(statements, dataset, fn {subject_with_context, predications}, dataset ->
fn ({subject_with_context, predications}, dataset) -> do_put(dataset, subject_with_context, predications)
do_put(dataset, subject_with_context, predications) end)
end
end end
defp do_put(%__MODULE__{name: name, graphs: graphs}, defp do_put(%__MODULE__{name: name, graphs: graphs}, {subject, graph_context}, predications)
{subject, graph_context}, predications) when is_list(predications) do
when is_list(predications) do
with graph_context = coerce_graph_name(graph_context) do with graph_context = coerce_graph_name(graph_context) do
graph = Map.get(graphs, graph_context, Graph.new(name: graph_context)) graph = Map.get(graphs, graph_context, Graph.new(name: graph_context))
new_graphs = graphs
new_graphs =
graphs
|> Map.put(graph_context, Graph.put(graph, subject, predications)) |> Map.put(graph_context, Graph.put(graph, subject, predications))
%__MODULE__{name: name, graphs: new_graphs} %__MODULE__{name: name, graphs: new_graphs}
end end
end end
@doc """ @doc """
Deletes statements from a `RDF.Dataset`. Deletes statements from a `RDF.Dataset`.
@ -307,14 +327,14 @@ defmodule RDF.Dataset do
are deleted. If you want to delete only datasets with matching names, you can are deleted. If you want to delete only datasets with matching names, you can
use `RDF.Data.delete/2`. use `RDF.Data.delete/2`.
""" """
@spec delete(t, input | [input], Statement.coercible_graph_name | boolean | nil) :: t @spec delete(t, input | [input], Statement.coercible_graph_name() | boolean | nil) :: t
def delete(dataset, statements, graph_context \\ false) def delete(dataset, statements, graph_context \\ false)
def delete(%__MODULE__{} = dataset, statements, graph_context) when is_list(statements) do def delete(%__MODULE__{} = dataset, statements, graph_context) when is_list(statements) do
with graph_context = graph_context && coerce_graph_name(graph_context) do with graph_context = graph_context && coerce_graph_name(graph_context) do
Enum.reduce statements, dataset, fn (statement, dataset) -> Enum.reduce(statements, dataset, fn statement, dataset ->
delete(dataset, statement, graph_context) delete(dataset, statement, graph_context)
end end)
end end
end end
@ -343,18 +363,17 @@ defmodule RDF.Dataset do
do: do_delete(dataset, graph_context, graph) do: do_delete(dataset, graph_context, graph)
def delete(%__MODULE__{} = dataset, %__MODULE__{graphs: graphs}, graph_context) do def delete(%__MODULE__{} = dataset, %__MODULE__{graphs: graphs}, graph_context) do
Enum.reduce graphs, dataset, fn ({_, graph}, dataset) -> Enum.reduce(graphs, dataset, fn {_, graph}, dataset ->
delete(dataset, graph, graph_context) delete(dataset, graph, graph_context)
end end)
end end
defp do_delete(%__MODULE__{name: name, graphs: graphs} = dataset, defp do_delete(%__MODULE__{name: name, graphs: graphs} = dataset, graph_context, statements) do
graph_context, statements) do
with graph_context = coerce_graph_name(graph_context), with graph_context = coerce_graph_name(graph_context),
graph when not is_nil(graph) <- graphs[graph_context], graph when not is_nil(graph) <- graphs[graph_context],
new_graph = Graph.delete(graph, statements) new_graph = Graph.delete(graph, statements) do
do %__MODULE__{
%__MODULE__{name: name, name: name,
graphs: graphs:
if Enum.empty?(new_graph) do if Enum.empty?(new_graph) do
Map.delete(graphs, graph_context) Map.delete(graphs, graph_context)
@ -367,17 +386,16 @@ defmodule RDF.Dataset do
end end
end end
@doc """ @doc """
Deletes the given graph. Deletes the given graph.
""" """
@spec delete_graph(t, Statement.graph_name | [Statement.graph_name] | nil) :: t @spec delete_graph(t, Statement.graph_name() | [Statement.graph_name()] | nil) :: t
def delete_graph(graph, graph_names) def delete_graph(graph, graph_names)
def delete_graph(%__MODULE__{} = dataset, graph_names) when is_list(graph_names) do def delete_graph(%__MODULE__{} = dataset, graph_names) when is_list(graph_names) do
Enum.reduce graph_names, dataset, fn (graph_name, dataset) -> Enum.reduce(graph_names, dataset, fn graph_name, dataset ->
delete_graph(dataset, graph_name) delete_graph(dataset, graph_name)
end end)
end end
def delete_graph(%__MODULE__{name: name, graphs: graphs}, graph_name) do def delete_graph(%__MODULE__{name: name, graphs: graphs}, graph_name) do
@ -393,7 +411,6 @@ defmodule RDF.Dataset do
def delete_default_graph(%__MODULE__{} = graph), def delete_default_graph(%__MODULE__{} = graph),
do: delete_graph(graph, nil) do: delete_graph(graph, nil)
@doc """ @doc """
Fetches the `RDF.Graph` with the given name. Fetches the `RDF.Graph` with the given name.
@ -410,7 +427,7 @@ defmodule RDF.Dataset do
:error :error
""" """
@impl Access @impl Access
@spec fetch(t, Statement.graph_name | nil) :: {:ok, Graph.t} | :error @spec fetch(t, Statement.graph_name() | nil) :: {:ok, Graph.t()} | :error
def fetch(%__MODULE__{graphs: graphs}, graph_name) do def fetch(%__MODULE__{graphs: graphs}, graph_name) do
Access.fetch(graphs, coerce_graph_name(graph_name)) Access.fetch(graphs, coerce_graph_name(graph_name))
end end
@ -433,36 +450,34 @@ defmodule RDF.Dataset do
iex> RDF.Dataset.get(dataset, EX.Foo, :bar) iex> RDF.Dataset.get(dataset, EX.Foo, :bar)
:bar :bar
""" """
@spec get(t, Statement.graph_name | nil, Graph.t | nil) :: Graph.t | nil @spec get(t, Statement.graph_name() | nil, Graph.t() | nil) :: Graph.t() | nil
def get(%__MODULE__{} = dataset, graph_name, default \\ nil) do def get(%__MODULE__{} = dataset, graph_name, default \\ nil) do
case fetch(dataset, graph_name) do case fetch(dataset, graph_name) do
{:ok, value} -> value {:ok, value} -> value
:error -> default :error -> default
end end
end end
@doc """ @doc """
The graph with given name. The graph with given name.
""" """
@spec graph(t, Statement.graph_name | nil) :: Graph.t @spec graph(t, Statement.graph_name() | nil) :: Graph.t()
def graph(%__MODULE__{graphs: graphs}, graph_name), def graph(%__MODULE__{graphs: graphs}, graph_name),
do: Map.get(graphs, coerce_graph_name(graph_name)) do: Map.get(graphs, coerce_graph_name(graph_name))
@doc """ @doc """
The default graph of a `RDF.Dataset`. The default graph of a `RDF.Dataset`.
""" """
@spec default_graph(t) :: Graph.t @spec default_graph(t) :: Graph.t()
def default_graph(%__MODULE__{graphs: graphs}), def default_graph(%__MODULE__{graphs: graphs}),
do: Map.get(graphs, nil, Graph.new) do: Map.get(graphs, nil, Graph.new())
@doc """ @doc """
The set of all graphs. The set of all graphs.
""" """
@spec graphs(t) :: [Graph.t] @spec graphs(t) :: [Graph.t()]
def graphs(%__MODULE__{graphs: graphs}), do: Map.values(graphs) def graphs(%__MODULE__{graphs: graphs}), do: Map.values(graphs)
@doc """ @doc """
Gets and updates the graph with the given name, in a single pass. Gets and updates the graph with the given name, in a single pass.
@ -486,37 +501,43 @@ defmodule RDF.Dataset do
{RDF.Graph.new({EX.S, EX.P, EX.O}, name: EX.Graph), RDF.Dataset.new({EX.S, EX.P, EX.NEW, EX.Graph})} {RDF.Graph.new({EX.S, EX.P, EX.O}, name: EX.Graph), RDF.Dataset.new({EX.S, EX.P, EX.NEW, EX.Graph})}
""" """
@impl Access @impl Access
@spec get_and_update(t, Statement.graph_name | nil, update_graph_fun) :: {Graph.t, input} @spec get_and_update(t, Statement.graph_name() | nil, update_graph_fun) :: {Graph.t(), input}
def get_and_update(%__MODULE__{} = dataset, graph_name, fun) do def get_and_update(%__MODULE__{} = dataset, graph_name, fun) do
with graph_context = coerce_graph_name(graph_name) do with graph_context = coerce_graph_name(graph_name) do
case fun.(get(dataset, graph_context)) do case fun.(get(dataset, graph_context)) do
{old_graph, new_graph} -> {old_graph, new_graph} ->
{old_graph, put(dataset, new_graph, graph_context)} {old_graph, put(dataset, new_graph, graph_context)}
:pop -> :pop ->
pop(dataset, graph_context) pop(dataset, graph_context)
other -> other ->
raise "the given function must return a two-element tuple or :pop, got: #{inspect(other)}" raise "the given function must return a two-element tuple or :pop, got: #{
inspect(other)
}"
end end
end end
end end
@doc """ @doc """
Pops an arbitrary statement from a `RDF.Dataset`. Pops an arbitrary statement from a `RDF.Dataset`.
""" """
@spec pop(t) :: {Statement.t | nil, t} @spec pop(t) :: {Statement.t() | nil, t}
def pop(dataset) def pop(dataset)
def pop(%__MODULE__{graphs: graphs} = dataset) def pop(%__MODULE__{graphs: graphs} = dataset)
when graphs == %{}, do: {nil, dataset} when graphs == %{},
do: {nil, dataset}
def pop(%__MODULE__{name: name, graphs: graphs}) do def pop(%__MODULE__{name: name, graphs: graphs}) do
# TODO: Find a faster way ... # TODO: Find a faster way ...
[{graph_name, graph}] = Enum.take(graphs, 1) [{graph_name, graph}] = Enum.take(graphs, 1)
{{s, p, o}, popped_graph} = Graph.pop(graph) {{s, p, o}, popped_graph} = Graph.pop(graph)
popped = if Enum.empty?(popped_graph),
do: graphs |> Map.delete(graph_name), popped =
else: graphs |> Map.put(graph_name, popped_graph) if Enum.empty?(popped_graph),
do: graphs |> Map.delete(graph_name),
else: graphs |> Map.put(graph_name, popped_graph)
{{s, p, o, graph_name}, %__MODULE__{name: name, graphs: popped}} {{s, p, o, graph_name}, %__MODULE__{name: name, graphs: popped}}
end end
@ -538,18 +559,17 @@ defmodule RDF.Dataset do
{nil, dataset} {nil, dataset}
""" """
@impl Access @impl Access
@spec pop(t, Statement.coercible_graph_name) :: {Statement.t | nil, t} @spec pop(t, Statement.coercible_graph_name()) :: {Statement.t() | nil, t}
def pop(%__MODULE__{name: name, graphs: graphs} = dataset, graph_name) do def pop(%__MODULE__{name: name, graphs: graphs} = dataset, graph_name) do
case Access.pop(graphs, coerce_graph_name(graph_name)) do case Access.pop(graphs, coerce_graph_name(graph_name)) do
{nil, _} -> {nil, _} ->
{nil, dataset} {nil, dataset}
{graph, new_graphs} -> {graph, new_graphs} ->
{graph, %__MODULE__{name: name, graphs: new_graphs}} {graph, %__MODULE__{name: name, graphs: new_graphs}}
end end
end end
@doc """ @doc """
The number of statements within a `RDF.Dataset`. The number of statements within a `RDF.Dataset`.
@ -564,9 +584,9 @@ defmodule RDF.Dataset do
""" """
@spec statement_count(t) :: non_neg_integer @spec statement_count(t) :: non_neg_integer
def statement_count(%__MODULE__{graphs: graphs}) do def statement_count(%__MODULE__{graphs: graphs}) do
Enum.reduce graphs, 0, fn ({_, graph}, count) -> Enum.reduce(graphs, 0, fn {_, graph}, count ->
count + Graph.triple_count(graph) count + Graph.triple_count(graph)
end end)
end end
@doc """ @doc """
@ -582,9 +602,9 @@ defmodule RDF.Dataset do
MapSet.new([RDF.iri(EX.S1), RDF.iri(EX.S2)]) MapSet.new([RDF.iri(EX.S1), RDF.iri(EX.S2)])
""" """
def subjects(%__MODULE__{graphs: graphs}) do def subjects(%__MODULE__{graphs: graphs}) do
Enum.reduce graphs, MapSet.new, fn ({_, graph}, subjects) -> Enum.reduce(graphs, MapSet.new(), fn {_, graph}, subjects ->
MapSet.union(subjects, Graph.subjects(graph)) MapSet.union(subjects, Graph.subjects(graph))
end end)
end end
@doc """ @doc """
@ -600,9 +620,9 @@ defmodule RDF.Dataset do
MapSet.new([EX.p1, EX.p2]) MapSet.new([EX.p1, EX.p2])
""" """
def predicates(%__MODULE__{graphs: graphs}) do def predicates(%__MODULE__{graphs: graphs}) do
Enum.reduce graphs, MapSet.new, fn ({_, graph}, predicates) -> Enum.reduce(graphs, MapSet.new(), fn {_, graph}, predicates ->
MapSet.union(predicates, Graph.predicates(graph)) MapSet.union(predicates, Graph.predicates(graph))
end end)
end end
@doc """ @doc """
@ -622,9 +642,9 @@ defmodule RDF.Dataset do
MapSet.new([RDF.iri(EX.O1), RDF.iri(EX.O2), RDF.bnode(:bnode)]) MapSet.new([RDF.iri(EX.O1), RDF.iri(EX.O2), RDF.bnode(:bnode)])
""" """
def objects(%__MODULE__{graphs: graphs}) do def objects(%__MODULE__{graphs: graphs}) do
Enum.reduce graphs, MapSet.new, fn ({_, graph}, objects) -> Enum.reduce(graphs, MapSet.new(), fn {_, graph}, objects ->
MapSet.union(objects, Graph.objects(graph)) MapSet.union(objects, Graph.objects(graph))
end end)
end end
@doc """ @doc """
@ -642,9 +662,9 @@ defmodule RDF.Dataset do
RDF.iri(EX.O1), RDF.iri(EX.O2), RDF.bnode(:bnode), EX.p1, EX.p2]) RDF.iri(EX.O1), RDF.iri(EX.O2), RDF.bnode(:bnode), EX.p1, EX.p2])
""" """
def resources(%__MODULE__{graphs: graphs}) do def resources(%__MODULE__{graphs: graphs}) do
Enum.reduce graphs, MapSet.new, fn ({_, graph}, resources) -> Enum.reduce(graphs, MapSet.new(), fn {_, graph}, resources ->
MapSet.union(resources, Graph.resources(graph)) MapSet.union(resources, Graph.resources(graph))
end end)
end end
@doc """ @doc """
@ -661,19 +681,19 @@ defmodule RDF.Dataset do
{RDF.iri(EX.S1), RDF.iri(EX.p2), RDF.iri(EX.O3)}, {RDF.iri(EX.S1), RDF.iri(EX.p2), RDF.iri(EX.O3)},
{RDF.iri(EX.S2), RDF.iri(EX.p2), RDF.iri(EX.O2)}] {RDF.iri(EX.S2), RDF.iri(EX.p2), RDF.iri(EX.O2)}]
""" """
@spec statements(t) :: [Statement.t] @spec statements(t) :: [Statement.t()]
def statements(%__MODULE__{graphs: graphs}) do def statements(%__MODULE__{graphs: graphs}) do
Enum.reduce graphs, [], fn ({_, graph}, all_statements) -> Enum.reduce(graphs, [], fn {_, graph}, all_statements ->
statements = Graph.triples(graph) statements = Graph.triples(graph)
if graph.name do if graph.name do
Enum.map statements, fn {s, p, o} -> {s, p, o, graph.name} end Enum.map(statements, fn {s, p, o} -> {s, p, o, graph.name} end)
else else
statements statements
end ++ all_statements end ++ all_statements
end end)
end end
@doc """ @doc """
Returns if a given statement is in a `RDF.Dataset`. Returns if a given statement is in a `RDF.Dataset`.
@ -686,7 +706,7 @@ defmodule RDF.Dataset do
...> RDF.Dataset.include?(dataset, {EX.S1, EX.p1, EX.O1, EX.Graph}) ...> RDF.Dataset.include?(dataset, {EX.S1, EX.p1, EX.O1, EX.Graph})
true true
""" """
@spec include?(t, Statement.t, Statement.coercible_graph_name | nil) :: boolean @spec include?(t, Statement.t(), Statement.coercible_graph_name() | nil) :: boolean
def include?(dataset, statement, graph_context \\ nil) def include?(dataset, statement, graph_context \\ nil)
def include?(%__MODULE__{graphs: graphs}, triple = {_, _, _}, graph_context) do def include?(%__MODULE__{graphs: graphs}, triple = {_, _, _}, graph_context) do
@ -702,7 +722,6 @@ defmodule RDF.Dataset do
def include?(%__MODULE__{} = dataset, {subject, predicate, object, graph_context}, _), def include?(%__MODULE__{} = dataset, {subject, predicate, object, graph_context}, _),
do: include?(dataset, {subject, predicate, object}, graph_context) do: include?(dataset, {subject, predicate, object}, graph_context)
@doc """ @doc """
Checks if a graph of a `RDF.Dataset` contains statements about the given resource. Checks if a graph of a `RDF.Dataset` contains statements about the given resource.
@ -713,7 +732,7 @@ defmodule RDF.Dataset do
iex> RDF.Dataset.new([{EX.S1, EX.p1, EX.O1}]) |> RDF.Dataset.describes?(EX.S2) iex> RDF.Dataset.new([{EX.S1, EX.p1, EX.O1}]) |> RDF.Dataset.describes?(EX.S2)
false false
""" """
@spec describes?(t, Statement.t, Statement.coercible_graph_name | nil) :: boolean @spec describes?(t, Statement.t(), Statement.coercible_graph_name() | nil) :: boolean
def describes?(%__MODULE__{graphs: graphs}, subject, graph_context \\ nil) do def describes?(%__MODULE__{graphs: graphs}, subject, graph_context \\ nil) do
with graph_context = coerce_graph_name(graph_context) do with graph_context = coerce_graph_name(graph_context) do
if graph = graphs[graph_context] do if graph = graphs[graph_context] do
@ -736,17 +755,16 @@ defmodule RDF.Dataset do
...> RDF.Dataset.who_describes(dataset, EX.S1) ...> RDF.Dataset.who_describes(dataset, EX.S1)
[nil, RDF.iri(EX.Graph1)] [nil, RDF.iri(EX.Graph1)]
""" """
@spec who_describes(t, Statement.coercible_subject) :: [Graph.t] @spec who_describes(t, Statement.coercible_subject()) :: [Graph.t()]
def who_describes(%__MODULE__{graphs: graphs}, subject) do def who_describes(%__MODULE__{graphs: graphs}, subject) do
with subject = coerce_subject(subject) do with subject = coerce_subject(subject) do
graphs graphs
|> Map.values |> Map.values()
|> Stream.filter(&Graph.describes?(&1, subject)) |> Stream.filter(&Graph.describes?(&1, subject))
|> Enum.map(&(&1.name)) |> Enum.map(& &1.name)
end end
end end
@doc """ @doc """
Returns a nested map of the native Elixir values of a `RDF.Dataset`. Returns a nested map of the native Elixir values of a `RDF.Dataset`.
@ -799,16 +817,15 @@ defmodule RDF.Dataset do
} }
""" """
@spec values(t, Statement.term_mapping) :: map @spec values(t, Statement.term_mapping()) :: map
def values(dataset, mapping \\ &RDF.Statement.default_term_mapping/1) def values(dataset, mapping \\ &RDF.Statement.default_term_mapping/1)
def values(%__MODULE__{graphs: graphs}, mapping) do def values(%__MODULE__{graphs: graphs}, mapping) do
Map.new graphs, fn {graph_name, graph} -> Map.new(graphs, fn {graph_name, graph} ->
{mapping.({:graph_name, graph_name}), Graph.values(graph, mapping)} {mapping.({:graph_name, graph_name}), Graph.values(graph, mapping)}
end end)
end end
@doc """ @doc """
Checks if two `RDF.Dataset`s are equal. Checks if two `RDF.Dataset`s are equal.
@ -825,24 +842,25 @@ defmodule RDF.Dataset do
def equal?(_, _), do: false def equal?(_, _), do: false
defp clear_metadata(%__MODULE__{graphs: graphs} = dataset) do defp clear_metadata(%__MODULE__{graphs: graphs} = dataset) do
%__MODULE__{dataset | %__MODULE__{
graphs: dataset
Map.new(graphs, fn {name, graph} -> | graphs:
{name, RDF.Graph.clear_metadata(graph)} Map.new(graphs, fn {name, graph} ->
end) {name, RDF.Graph.clear_metadata(graph)}
end)
} }
end end
defimpl Enumerable do defimpl Enumerable do
alias RDF.Dataset alias RDF.Dataset
def member?(dataset, statement), do: {:ok, Dataset.include?(dataset, statement)} def member?(dataset, statement), do: {:ok, Dataset.include?(dataset, statement)}
def count(dataset), do: {:ok, Dataset.statement_count(dataset)} def count(dataset), do: {:ok, Dataset.statement_count(dataset)}
def slice(_dataset), do: {:error, __MODULE__} def slice(_dataset), do: {:error, __MODULE__}
def reduce(%Dataset{graphs: graphs}, {:cont, acc}, _fun) def reduce(%Dataset{graphs: graphs}, {:cont, acc}, _fun)
when map_size(graphs) == 0, do: {:done, acc} when map_size(graphs) == 0,
do: {:done, acc}
def reduce(%Dataset{} = dataset, {:cont, acc}, fun) do def reduce(%Dataset{} = dataset, {:cont, acc}, fun) do
{statement, rest} = Dataset.pop(dataset) {statement, rest} = Dataset.pop(dataset)
@ -850,26 +868,31 @@ defmodule RDF.Dataset do
end end
def reduce(_, {:halt, acc}, _fun), do: {:halted, acc} def reduce(_, {:halt, acc}, _fun), do: {:halted, acc}
def reduce(dataset = %Dataset{}, {:suspend, acc}, fun) do def reduce(dataset = %Dataset{}, {:suspend, acc}, fun) do
{:suspended, acc, &reduce(dataset, &1, fun)} {:suspended, acc, &reduce(dataset, &1, fun)}
end end
end end
defimpl Collectable do defimpl Collectable do
alias RDF.Dataset alias RDF.Dataset
def into(original) do def into(original) do
collector_fun = fn collector_fun = fn
dataset, {:cont, list} when is_list(list) dataset, {:cont, list} when is_list(list) ->
-> Dataset.add(dataset, List.to_tuple(list)) Dataset.add(dataset, List.to_tuple(list))
dataset, {:cont, elem} -> Dataset.add(dataset, elem)
dataset, :done -> dataset dataset, {:cont, elem} ->
_dataset, :halt -> :ok Dataset.add(dataset, elem)
dataset, :done ->
dataset
_dataset, :halt ->
:ok
end end
{original, collector_fun} {original, collector_fun}
end end
end end
end end
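A rough sketch of what these protocol implementations enable, assuming RDF.Dataset.new/0 builds an empty dataset (the constructor is not part of this hunk) and using placeholder example.com IRIs:

statement = {RDF.iri("http://example.com/S"), RDF.iri("http://example.com/p"), RDF.iri("http://example.com/O")}

# Collectable: collect statements into a dataset
dataset = Enum.into([statement], RDF.Dataset.new())

# Enumerable: counting and membership delegate to statement_count/1 and include?/2
Enum.count(dataset)
# => 1
Enum.member?(dataset, statement)
# => true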
View File
@ -15,51 +15,56 @@ defmodule RDF.Description do
import RDF.Statement import RDF.Statement
alias RDF.{Statement, Triple} alias RDF.{Statement, Triple}
@type predications :: %{Statement.predicate => %{Statement.object => nil}} @type predications :: %{Statement.predicate() => %{Statement.object() => nil}}
@type statements :: @type statements ::
{Statement.coercible_predicate, {Statement.coercible_predicate(),
Statement.coercible_object | [Statement.coercible_predicate]} Statement.coercible_object() | [Statement.coercible_predicate()]}
| Statement.t | Statement.t()
| predications | predications
| t | t
@type t :: %__MODULE__{ @type t :: %__MODULE__{
subject: Statement.subject, subject: Statement.subject(),
predications: predications predications: predications
} }
@enforce_keys [:subject] @enforce_keys [:subject]
defstruct subject: nil, predications: %{} defstruct subject: nil, predications: %{}
@doc """ @doc """
Creates a new `RDF.Description` about the given subject with optional initial statements. Creates a new `RDF.Description` about the given subject with optional initial statements.
When given a list of statements, the first one must contain a subject. When given a list of statements, the first one must contain a subject.
""" """
@spec new(Statement.coercible_subject | statements | [statements]) :: t @spec new(Statement.coercible_subject() | statements | [statements]) :: t
def new(subject) def new(subject)
def new({subject, predicate, object}), def new({subject, predicate, object}),
do: new(subject) |> add(predicate, object) do: new(subject) |> add(predicate, object)
def new([statement | more_statements]), def new([statement | more_statements]),
do: new(statement) |> add(more_statements) do: new(statement) |> add(more_statements)
def new(%__MODULE__{} = description), def new(%__MODULE__{} = description),
do: description do: description
def new(subject), def new(subject),
do: %__MODULE__{subject: coerce_subject(subject)} do: %__MODULE__{subject: coerce_subject(subject)}
@doc """ @doc """
Creates a new `RDF.Description` about the given subject with optional initial statements. Creates a new `RDF.Description` about the given subject with optional initial statements.
""" """
@spec new(Statement.coercible_subject, statements | [statements]) :: t @spec new(Statement.coercible_subject(), statements | [statements]) :: t
def new(subject, {predicate, objects}), def new(subject, {predicate, objects}),
do: new(subject) |> add(predicate, objects) do: new(subject) |> add(predicate, objects)
def new(subject, statements) when is_list(statements), def new(subject, statements) when is_list(statements),
do: new(subject) |> add(statements) do: new(subject) |> add(statements)
def new(subject, %__MODULE__{predications: predications}), def new(subject, %__MODULE__{predications: predications}),
do: %__MODULE__{new(subject) | predications: predications} do: %__MODULE__{new(subject) | predications: predications}
def new(subject, predications = %{}), def new(subject, predications = %{}),
do: new(subject) |> add(predications) do: new(subject) |> add(predications)
@ -67,16 +72,16 @@ defmodule RDF.Description do
Creates a new `RDF.Description` about the given subject with optional initial statements. Creates a new `RDF.Description` about the given subject with optional initial statements.
""" """
@spec new( @spec new(
Statement.coercible_subject | statements | [statements], Statement.coercible_subject() | statements | [statements],
Statement.coercible_predicate, Statement.coercible_predicate(),
Statement.coercible_object | [Statement.coercible_object] Statement.coercible_object() | [Statement.coercible_object()]
) :: t ) :: t
def new(%__MODULE__{} = description, predicate, objects), def new(%__MODULE__{} = description, predicate, objects),
do: add(description, predicate, objects) do: add(description, predicate, objects)
def new(subject, predicate, objects), def new(subject, predicate, objects),
do: new(subject) |> add(predicate, objects) do: new(subject) |> add(predicate, objects)
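A minimal sketch of the three constructor arities above, using placeholder example.com IRIs:

s = RDF.iri("http://example.com/S")
p = RDF.iri("http://example.com/p")

RDF.Description.new(s)                    # empty description about s
RDF.Description.new(s, {p, "Foo"})        # subject plus a {predicate, objects} pair
RDF.Description.new(s, p, ["Foo", "Bar"]) # subject, predicate and a list of objects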
@doc """ @doc """
Add objects to a predicate of a `RDF.Description`. Add objects to a predicate of a `RDF.Description`.
@ -89,29 +94,28 @@ defmodule RDF.Description do
""" """
@spec add( @spec add(
t, t,
Statement.coercible_predicate, Statement.coercible_predicate(),
Statement.coercible_object | [Statement.coercible_object] Statement.coercible_object() | [Statement.coercible_object()]
) :: t ) :: t
def add(description, predicate, objects) def add(description, predicate, objects)
def add(description, predicate, objects) when is_list(objects) do def add(description, predicate, objects) when is_list(objects) do
Enum.reduce objects, description, fn (object, description) -> Enum.reduce(objects, description, fn object, description ->
add(description, predicate, object) add(description, predicate, object)
end end)
end end
def add(%__MODULE__{subject: subject, predications: predications}, predicate, object) do def add(%__MODULE__{subject: subject, predications: predications}, predicate, object) do
with triple_predicate = coerce_predicate(predicate), with triple_predicate = coerce_predicate(predicate),
triple_object = coerce_object(object), triple_object = coerce_object(object),
new_predications = Map.update(predications, new_predications =
triple_predicate, %{triple_object => nil}, fn objects -> Map.update(predications, triple_predicate, %{triple_object => nil}, fn objects ->
Map.put_new(objects, triple_object, nil) Map.put_new(objects, triple_object, nil)
end) do end) do
%__MODULE__{subject: subject, predications: new_predications} %__MODULE__{subject: subject, predications: new_predications}
end end
end end
@doc """ @doc """
Adds statements to a `RDF.Description`. Adds statements to a `RDF.Description`.
@ -128,7 +132,7 @@ defmodule RDF.Description do
def add(description = %__MODULE__{}, {subject, predicate, object}) do def add(description = %__MODULE__{}, {subject, predicate, object}) do
if coerce_subject(subject) == description.subject, if coerce_subject(subject) == description.subject,
do: add(description, predicate, object), do: add(description, predicate, object),
else: description else: description
end end
@ -136,25 +140,29 @@ defmodule RDF.Description do
do: add(description, {subject, predicate, object}) do: add(description, {subject, predicate, object})
def add(description, statements) when is_list(statements) do def add(description, statements) when is_list(statements) do
Enum.reduce statements, description, fn (statement, description) -> Enum.reduce(statements, description, fn statement, description ->
add(description, statement) add(description, statement)
end end)
end end
def add(%__MODULE__{subject: subject, predications: predications}, def add(
%__MODULE__{predications: other_predications}) do %__MODULE__{subject: subject, predications: predications},
merged_predications = Map.merge predications, other_predications, %__MODULE__{predications: other_predications}
fn (_, objects, other_objects) -> Map.merge(objects, other_objects) end ) do
merged_predications =
Map.merge(predications, other_predications, fn _, objects, other_objects ->
Map.merge(objects, other_objects)
end)
%__MODULE__{subject: subject, predications: merged_predications} %__MODULE__{subject: subject, predications: merged_predications}
end end
def add(description = %__MODULE__{}, predications = %{}) do def add(description = %__MODULE__{}, predications = %{}) do
Enum.reduce predications, description, fn ({predicate, objects}, description) -> Enum.reduce(predications, description, fn {predicate, objects}, description ->
add(description, predicate, objects) add(description, predicate, objects)
end end)
end end
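A minimal sketch of add/3 and add/2, using placeholder example.com IRIs; objects may be given singly, as a list, or as a predications map:

s = RDF.iri("http://example.com/S")
p1 = RDF.iri("http://example.com/p1")
p2 = RDF.iri("http://example.com/p2")

RDF.Description.new(s)
|> RDF.Description.add(p1, "Foo")
|> RDF.Description.add(p1, ["Bar", "Baz"])
|> RDF.Description.add(%{p2 => "Qux"})
# => a description of s with three objects under p1 and one under p2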
@doc """ @doc """
Puts objects to a predicate of a `RDF.Description`, overwriting all existing objects. Puts objects to a predicate of a `RDF.Description`, overwriting all existing objects.
@ -167,18 +175,22 @@ defmodule RDF.Description do
""" """
@spec put( @spec put(
t, t,
Statement.coercible_predicate, Statement.coercible_predicate(),
Statement.coercible_object | [Statement.coercible_object] Statement.coercible_object() | [Statement.coercible_object()]
) :: t ) :: t
def put(description, predicate, objects) def put(description, predicate, objects)
def put(%__MODULE__{subject: subject, predications: predications}, def put(%__MODULE__{subject: subject, predications: predications}, predicate, objects)
predicate, objects) when is_list(objects) do when is_list(objects) do
with triple_predicate = coerce_predicate(predicate), with triple_predicate = coerce_predicate(predicate),
triple_objects = Enum.reduce(objects, %{}, fn (object, acc) -> triple_objects =
Map.put_new(acc, coerce_object(object), nil) end), Enum.reduce(objects, %{}, fn object, acc ->
do: %__MODULE__{subject: subject, Map.put_new(acc, coerce_object(object), nil)
predications: Map.put(predications, triple_predicate, triple_objects)} end),
do: %__MODULE__{
subject: subject,
predications: Map.put(predications, triple_predicate, triple_objects)
}
end end
def put(%__MODULE__{} = description, predicate, object), def put(%__MODULE__{} = description, predicate, object),
@ -209,7 +221,7 @@ defmodule RDF.Description do
def put(%__MODULE__{} = description, {subject, predicate, object}) do def put(%__MODULE__{} = description, {subject, predicate, object}) do
if coerce_subject(subject) == description.subject, if coerce_subject(subject) == description.subject,
do: put(description, predicate, object), do: put(description, predicate, object),
else: description else: description
end end
@ -219,53 +231,62 @@ defmodule RDF.Description do
def put(%__MODULE__{subject: subject} = description, statements) when is_list(statements) do def put(%__MODULE__{subject: subject} = description, statements) when is_list(statements) do
statements statements
|> Stream.map(fn |> Stream.map(fn
{p, o} -> {coerce_predicate(p), o} {p, o} ->
{^subject, p, o} -> {coerce_predicate(p), o} {coerce_predicate(p), o}
{s, p, o} ->
if coerce_subject(s) == subject, {^subject, p, o} ->
do: {coerce_predicate(p), o} {coerce_predicate(p), o}
bad -> raise ArgumentError, "#{inspect bad} is not a valid statement"
end) {s, p, o} ->
|> Stream.filter(&(&1)) # filter nil values if coerce_subject(s) == subject,
|> Enum.group_by(&(elem(&1, 0)), &(elem(&1, 1))) do: {coerce_predicate(p), o}
|> Enum.reduce(description, fn ({predicate, objects}, description) ->
put(description, predicate, objects) bad ->
end) raise ArgumentError, "#{inspect(bad)} is not a valid statement"
end)
# filter nil values
|> Stream.filter(& &1)
|> Enum.group_by(&elem(&1, 0), &elem(&1, 1))
|> Enum.reduce(description, fn {predicate, objects}, description ->
put(description, predicate, objects)
end)
end end
def put(%__MODULE__{subject: subject, predications: predications}, def put(
%__MODULE__{predications: other_predications}) do %__MODULE__{subject: subject, predications: predications},
merged_predications = Map.merge predications, other_predications, %__MODULE__{predications: other_predications}
fn (_, _, other_objects) -> other_objects end ) do
merged_predications =
Map.merge(predications, other_predications, fn _, _, other_objects -> other_objects end)
%__MODULE__{subject: subject, predications: merged_predications} %__MODULE__{subject: subject, predications: merged_predications}
end end
def put(description = %__MODULE__{}, predications = %{}) do def put(description = %__MODULE__{}, predications = %{}) do
Enum.reduce predications, description, fn ({predicate, objects}, description) -> Enum.reduce(predications, description, fn {predicate, objects}, description ->
put(description, predicate, objects) put(description, predicate, objects)
end end)
end end
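A minimal sketch contrasting put/3 with add/3 (placeholder example.com IRIs); put/3 replaces every existing object of the predicate:

p = RDF.iri("http://example.com/p")

RDF.Description.new(RDF.iri("http://example.com/S"), p, ["Foo", "Bar"])
|> RDF.Description.put(p, "Baz")
|> RDF.Description.get(p)
# => only the newly put object remains under p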
@doc """ @doc """
Deletes statements from a `RDF.Description`. Deletes statements from a `RDF.Description`.
""" """
@spec delete( @spec delete(
t, t,
Statement.coercible_predicate, Statement.coercible_predicate(),
Statement.coercible_object | [Statement.coercible_object] Statement.coercible_object() | [Statement.coercible_object()]
) :: t ) :: t
def delete(description, predicate, objects) def delete(description, predicate, objects)
def delete(description, predicate, objects) when is_list(objects) do def delete(description, predicate, objects) when is_list(objects) do
Enum.reduce objects, description, fn (object, description) -> Enum.reduce(objects, description, fn object, description ->
delete(description, predicate, object) delete(description, predicate, object)
end end)
end end
def delete(%__MODULE__{subject: subject, predications: predications} = descr, predicate, object) do def delete(%__MODULE__{subject: subject, predications: predications} = descr, predicate, object) do
with triple_predicate = coerce_predicate(predicate), with triple_predicate = coerce_predicate(predicate),
triple_object = coerce_object(object) do triple_object = coerce_object(object) do
if (objects = predications[triple_predicate]) && Map.has_key?(objects, triple_object) do if (objects = predications[triple_predicate]) && Map.has_key?(objects, triple_object) do
%__MODULE__{ %__MODULE__{
subject: subject, subject: subject,
@ -274,10 +295,10 @@ defmodule RDF.Description do
Map.delete(predications, triple_predicate) Map.delete(predications, triple_predicate)
else else
Map.update!(predications, triple_predicate, fn objects -> Map.update!(predications, triple_predicate, fn objects ->
Map.delete(objects, triple_object) Map.delete(objects, triple_object)
end) end)
end end
} }
else else
descr descr
end end
@ -300,7 +321,7 @@ defmodule RDF.Description do
def delete(description = %__MODULE__{}, {subject, predicate, object}) do def delete(description = %__MODULE__{}, {subject, predicate, object}) do
if coerce_subject(subject) == description.subject, if coerce_subject(subject) == description.subject,
do: delete(description, predicate, object), do: delete(description, predicate, object),
else: description else: description
end end
@ -308,34 +329,34 @@ defmodule RDF.Description do
do: delete(description, {subject, predicate, object}) do: delete(description, {subject, predicate, object})
def delete(description, statements) when is_list(statements) do def delete(description, statements) when is_list(statements) do
Enum.reduce statements, description, fn (statement, description) -> Enum.reduce(statements, description, fn statement, description ->
delete(description, statement) delete(description, statement)
end end)
end end
def delete(description = %__MODULE__{}, other_description = %__MODULE__{}) do def delete(description = %__MODULE__{}, other_description = %__MODULE__{}) do
Enum.reduce other_description, description, fn ({_, predicate, object}, description) -> Enum.reduce(other_description, description, fn {_, predicate, object}, description ->
delete(description, predicate, object) delete(description, predicate, object)
end end)
end end
def delete(description = %__MODULE__{}, predications = %{}) do def delete(description = %__MODULE__{}, predications = %{}) do
Enum.reduce predications, description, fn ({predicate, objects}, description) -> Enum.reduce(predications, description, fn {predicate, objects}, description ->
delete(description, predicate, objects) delete(description, predicate, objects)
end end)
end end
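A minimal sketch of the delete variants, using placeholder example.com IRIs:

s = RDF.iri("http://example.com/S")
p = RDF.iri("http://example.com/p")

RDF.Description.new(s, p, ["Foo", "Bar"])
|> RDF.Description.delete(p, "Foo")
|> RDF.Description.delete({s, p, "Bar"})
|> RDF.Description.count()
# => 0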
@doc """ @doc """
Deletes all statements with the given properties. Deletes all statements with the given properties.
""" """
@spec delete_predicates(t, Statement.coercible_predicate | [Statement.coercible_predicate]) :: t @spec delete_predicates(t, Statement.coercible_predicate() | [Statement.coercible_predicate()]) ::
t
def delete_predicates(description, properties) def delete_predicates(description, properties)
def delete_predicates(%__MODULE__{} = description, properties) when is_list(properties) do def delete_predicates(%__MODULE__{} = description, properties) when is_list(properties) do
Enum.reduce properties, description, fn (property, description) -> Enum.reduce(properties, description, fn property, description ->
delete_predicates(description, property) delete_predicates(description, property)
end end)
end end
def delete_predicates(%__MODULE__{subject: subject, predications: predications}, property) do def delete_predicates(%__MODULE__{subject: subject, predications: predications}, property) do
@ -344,7 +365,6 @@ defmodule RDF.Description do
end end
end end
@doc """ @doc """
Fetches the objects for the given predicate of a Description. Fetches the objects for the given predicate of a Description.
@ -361,7 +381,7 @@ defmodule RDF.Description do
:error :error
""" """
@impl Access @impl Access
@spec fetch(t, Statement.coercible_predicate) :: {:ok, [Statement.object]} | :error @spec fetch(t, Statement.coercible_predicate()) :: {:ok, [Statement.object()]} | :error
def fetch(%__MODULE__{predications: predications}, predicate) do def fetch(%__MODULE__{predications: predications}, predicate) do
with {:ok, objects} <- Access.fetch(predications, coerce_predicate(predicate)) do with {:ok, objects} <- Access.fetch(predications, coerce_predicate(predicate)) do
{:ok, Map.keys(objects)} {:ok, Map.keys(objects)}
@ -382,11 +402,11 @@ defmodule RDF.Description do
iex> RDF.Description.get(RDF.Description.new(EX.S), EX.foo, :bar) iex> RDF.Description.get(RDF.Description.new(EX.S), EX.foo, :bar)
:bar :bar
""" """
@spec get(t, Statement.coercible_predicate, any) :: [Statement.object] | any @spec get(t, Statement.coercible_predicate(), any) :: [Statement.object()] | any
def get(description = %__MODULE__{}, predicate, default \\ nil) do def get(description = %__MODULE__{}, predicate, default \\ nil) do
case fetch(description, predicate) do case fetch(description, predicate) do
{:ok, value} -> value {:ok, value} -> value
:error -> default :error -> default
end end
end end
@ -402,11 +422,11 @@ defmodule RDF.Description do
iex> RDF.Description.first(RDF.Description.new(EX.S), EX.foo) iex> RDF.Description.first(RDF.Description.new(EX.S), EX.foo)
nil nil
""" """
@spec first(t, Statement.coercible_predicate) :: Statement.object | nil @spec first(t, Statement.coercible_predicate()) :: Statement.object() | nil
def first(description = %__MODULE__{}, predicate) do def first(description = %__MODULE__{}, predicate) do
description description
|> get(predicate, []) |> get(predicate, [])
|> List.first |> List.first()
end end
@doc """ @doc """
@ -433,8 +453,8 @@ defmodule RDF.Description do
""" """
@spec update( @spec update(
t, t,
Statement.coercible_predicate, Statement.coercible_predicate(),
Statement.coercible_object | nil, Statement.coercible_object() | nil,
([Statement.Object] -> [Statement.Object]) ([Statement.Object] -> [Statement.Object])
) :: t ) :: t
def update(description = %__MODULE__{}, predicate, initial \\ nil, fun) do def update(description = %__MODULE__{}, predicate, initial \\ nil, fun) do
@ -453,13 +473,12 @@ defmodule RDF.Description do
|> fun.() |> fun.()
|> List.wrap() |> List.wrap()
|> case do |> case do
[] -> delete_predicates(description, predicate) [] -> delete_predicates(description, predicate)
objects -> put(description, predicate, objects) objects -> put(description, predicate, objects)
end end
end end
end end
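A minimal sketch of update/4 (placeholder example.com IRIs); the given function receives the current list of objects, and an empty result removes the predicate:

p = RDF.iri("http://example.com/p")

RDF.Description.new(RDF.iri("http://example.com/S"), p, "Foo")
|> RDF.Description.update(p, fn objects -> ["Bar" | objects] end)
|> RDF.Description.get(p)
# => the previous object plus the newly prepended one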
@doc """ @doc """
Gets and updates the objects of the given predicate of a Description, in a single pass. Gets and updates the objects of the given predicate of a Description, in a single pass.
@ -488,7 +507,7 @@ defmodule RDF.Description do
@impl Access @impl Access
@spec get_and_update( @spec get_and_update(
t, t,
Statement.coercible_predicate, Statement.coercible_predicate(),
([Statement.Object] -> {[Statement.Object], t} | :pop) ([Statement.Object] -> {[Statement.Object], t} | :pop)
) :: {[Statement.Object], t} ) :: {[Statement.Object], t}
def get_and_update(description = %__MODULE__{}, predicate, fun) do def get_and_update(description = %__MODULE__{}, predicate, fun) do
@ -496,32 +515,34 @@ defmodule RDF.Description do
case fun.(get(description, triple_predicate)) do case fun.(get(description, triple_predicate)) do
{objects_to_return, new_objects} -> {objects_to_return, new_objects} ->
{objects_to_return, put(description, triple_predicate, new_objects)} {objects_to_return, put(description, triple_predicate, new_objects)}
:pop -> pop(description, triple_predicate)
:pop ->
pop(description, triple_predicate)
end end
end end
end end
@doc """ @doc """
Pops an arbitrary triple from a `RDF.Description`. Pops an arbitrary triple from a `RDF.Description`.
""" """
@spec pop(t) :: {Triple.t | [Statement.Object] | nil, t} @spec pop(t) :: {Triple.t() | [Statement.Object] | nil, t}
def pop(description) def pop(description)
def pop(description = %__MODULE__{predications: predications}) def pop(description = %__MODULE__{predications: predications})
when predications == %{}, do: {nil, description} when predications == %{},
do: {nil, description}
def pop(%__MODULE__{subject: subject, predications: predications}) do def pop(%__MODULE__{subject: subject, predications: predications}) do
# TODO: Find a faster way ... # TODO: Find a faster way ...
predicate = List.first(Map.keys(predications)) predicate = List.first(Map.keys(predications))
[{object, _}] = Enum.take(objects = predications[predicate], 1) [{object, _}] = Enum.take(objects = predications[predicate], 1)
popped = if Enum.count(objects) == 1, popped =
do: elem(Map.pop(predications, predicate), 1), if Enum.count(objects) == 1,
else: elem(pop_in(predications, [predicate, object]), 1) do: elem(Map.pop(predications, predicate), 1),
else: elem(pop_in(predications, [predicate, object]), 1)
{{subject, predicate, object}, {{subject, predicate, object}, %__MODULE__{subject: subject, predications: popped}}
%__MODULE__{subject: subject, predications: popped}}
end end
@doc """ @doc """
@ -541,12 +562,12 @@ defmodule RDF.Description do
case Access.pop(predications, coerce_predicate(predicate)) do case Access.pop(predications, coerce_predicate(predicate)) do
{nil, _} -> {nil, _} ->
{nil, description} {nil, description}
{objects, new_predications} -> {objects, new_predications} ->
{Map.keys(objects), %__MODULE__{subject: subject, predications: new_predications}} {Map.keys(objects), %__MODULE__{subject: subject, predications: new_predications}}
end end
end end
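A minimal sketch of pop/1 and pop/2, using placeholder example.com IRIs:

s = RDF.iri("http://example.com/S")
p = RDF.iri("http://example.com/p")
description = RDF.Description.new(s, p, ["Foo", "Bar"])

{triple, rest} = RDF.Description.pop(description)
# triple is one {subject, predicate, object} tuple, rest holds the remaining statement

{objects, rest} = RDF.Description.pop(description, p)
# objects is the full list of objects of p, rest no longer mentions p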
@doc """ @doc """
The set of all properties used in the predicates within a `RDF.Description`. The set of all properties used in the predicates within a `RDF.Description`.
@ -559,9 +580,9 @@ defmodule RDF.Description do
...> RDF.Description.predicates ...> RDF.Description.predicates
MapSet.new([EX.p1, EX.p2]) MapSet.new([EX.p1, EX.p2])
""" """
@spec predicates(t) :: MapSet.t @spec predicates(t) :: MapSet.t()
def predicates(%__MODULE__{predications: predications}), def predicates(%__MODULE__{predications: predications}),
do: predications |> Map.keys |> MapSet.new do: predications |> Map.keys() |> MapSet.new()
@doc """ @doc """
The set of all resources used in the objects within a `RDF.Description`. The set of all resources used in the objects within a `RDF.Description`.
@ -579,22 +600,22 @@ defmodule RDF.Description do
...> ]) |> RDF.Description.objects ...> ]) |> RDF.Description.objects
MapSet.new([RDF.iri(EX.O1), RDF.iri(EX.O2), RDF.bnode(:bnode)]) MapSet.new([RDF.iri(EX.O1), RDF.iri(EX.O2), RDF.bnode(:bnode)])
""" """
@spec objects(t) :: MapSet.t @spec objects(t) :: MapSet.t()
def objects(%__MODULE__{} = description), def objects(%__MODULE__{} = description),
do: objects(description, &RDF.resource?/1) do: objects(description, &RDF.resource?/1)
@doc """ @doc """
The set of all resources used in the objects within a `RDF.Description` satisfying the given filter criterion. The set of all resources used in the objects within a `RDF.Description` satisfying the given filter criterion.
""" """
@spec objects(t, (Statement.object -> boolean)) :: MapSet.t @spec objects(t, (Statement.object() -> boolean)) :: MapSet.t()
def objects(%__MODULE__{predications: predications}, filter_fn) do def objects(%__MODULE__{predications: predications}, filter_fn) do
Enum.reduce predications, MapSet.new, fn ({_, objects}, acc) -> Enum.reduce(predications, MapSet.new(), fn {_, objects}, acc ->
objects objects
|> Map.keys |> Map.keys()
|> Enum.filter(filter_fn) |> Enum.filter(filter_fn)
|> MapSet.new |> MapSet.new()
|> MapSet.union(acc) |> MapSet.union(acc)
end end)
end end
@doc """ @doc """
@ -611,7 +632,7 @@ defmodule RDF.Description do
...> ]) |> RDF.Description.resources ...> ]) |> RDF.Description.resources
MapSet.new([RDF.iri(EX.O1), RDF.iri(EX.O2), RDF.bnode(:bnode), EX.p1, EX.p2, EX.p3]) MapSet.new([RDF.iri(EX.O1), RDF.iri(EX.O2), RDF.bnode(:bnode), EX.p1, EX.p2, EX.p3])
""" """
@spec resources(t) :: MapSet.t @spec resources(t) :: MapSet.t()
def resources(description) do def resources(description) do
description description
|> objects |> objects
@ -626,42 +647,42 @@ defmodule RDF.Description do
defdelegate statements(description), to: __MODULE__, as: :triples defdelegate statements(description), to: __MODULE__, as: :triples
@doc """ @doc """
Returns the number of statements of a `RDF.Description`. Returns the number of statements of a `RDF.Description`.
""" """
@spec count(t) :: non_neg_integer @spec count(t) :: non_neg_integer
def count(%__MODULE__{predications: predications}) do def count(%__MODULE__{predications: predications}) do
Enum.reduce predications, 0, Enum.reduce(predications, 0, fn {_, objects}, count -> count + Enum.count(objects) end)
fn ({_, objects}, count) -> count + Enum.count(objects) end
end end
@doc """ @doc """
Checks if the given statement exists within a `RDF.Description`. Checks if the given statement exists within a `RDF.Description`.
""" """
@spec include?(t, statements) :: boolean @spec include?(t, statements) :: boolean
def include?(description, statement) def include?(description, statement)
def include?(%__MODULE__{predications: predications}, def include?(
{predicate, object}) do %__MODULE__{predications: predications},
{predicate, object}
) do
with triple_predicate = coerce_predicate(predicate), with triple_predicate = coerce_predicate(predicate),
triple_object = coerce_object(object) do triple_object = coerce_object(object) do
predications predications
|> Map.get(triple_predicate, %{}) |> Map.get(triple_predicate, %{})
|> Map.has_key?(triple_object) |> Map.has_key?(triple_object)
end end
end end
def include?(desc = %__MODULE__{subject: desc_subject}, def include?(
{subject, predicate, object}) do desc = %__MODULE__{subject: desc_subject},
{subject, predicate, object}
) do
coerce_subject(subject) == desc_subject && coerce_subject(subject) == desc_subject &&
include?(desc, {predicate, object}) include?(desc, {predicate, object})
end end
def include?(%__MODULE__{}, _), do: false def include?(%__MODULE__{}, _), do: false
@doc """ @doc """
Checks if a `RDF.Description` has the given resource as subject. Checks if a `RDF.Description` has the given resource as subject.
@ -672,7 +693,7 @@ defmodule RDF.Description do
iex> RDF.Description.new(EX.S1, EX.p1, EX.O1) |> RDF.Description.describes?(EX.S2) iex> RDF.Description.new(EX.S1, EX.p1, EX.O1) |> RDF.Description.describes?(EX.S2)
false false
""" """
@spec describes?(t, Statement.subject) :: boolean @spec describes?(t, Statement.subject()) :: boolean
def describes?(%__MODULE__{subject: subject}, other_subject) do def describes?(%__MODULE__{subject: subject}, other_subject) do
with other_subject = coerce_subject(other_subject) do with other_subject = coerce_subject(other_subject) do
subject == other_subject subject == other_subject
@ -712,16 +733,16 @@ defmodule RDF.Description do
%{p: ["Foo"]} %{p: ["Foo"]}
""" """
@spec values(t, Statement.term_mapping) :: map @spec values(t, Statement.term_mapping()) :: map
def values(description, mapping \\ &RDF.Statement.default_term_mapping/1) def values(description, mapping \\ &RDF.Statement.default_term_mapping/1)
def values(%__MODULE__{predications: predications}, mapping) do def values(%__MODULE__{predications: predications}, mapping) do
Map.new predications, fn {predicate, objects} -> Map.new(predications, fn {predicate, objects} ->
{ {
mapping.({:predicate, predicate}), mapping.({:predicate, predicate}),
objects |> Map.keys() |> Enum.map(&(mapping.({:object, &1}))) objects |> Map.keys() |> Enum.map(&mapping.({:object, &1}))
} }
end end)
end end
@doc """ @doc """
@ -731,13 +752,13 @@ defmodule RDF.Description do
If `nil` is passed, the description is left untouched. If `nil` is passed, the description is left untouched.
""" """
@spec take(t, [Statement.coercible_predicate] | Enum.t | nil) :: t @spec take(t, [Statement.coercible_predicate()] | Enum.t() | nil) :: t
def take(description, predicates) def take(description, predicates)
def take(%__MODULE__{} = description, nil), do: description def take(%__MODULE__{} = description, nil), do: description
def take(%__MODULE__{predications: predications} = description, predicates) do def take(%__MODULE__{predications: predications} = description, predicates) do
predicates = Enum.map(predicates, &(coerce_predicate/1)) predicates = Enum.map(predicates, &coerce_predicate/1)
%__MODULE__{description | predications: Map.take(predications, predicates)} %__MODULE__{description | predications: Map.take(predications, predicates)}
end end
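A minimal sketch of take/2 (placeholder example.com IRIs); passing nil leaves the description untouched, a list of predicates keeps only those:

p1 = RDF.iri("http://example.com/p1")
p2 = RDF.iri("http://example.com/p2")

RDF.Description.new(RDF.iri("http://example.com/S"))
|> RDF.Description.add(p1, "Foo")
|> RDF.Description.add(p2, "Bar")
|> RDF.Description.take([p1])
|> RDF.Description.predicates()
# => a MapSet containing only p1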
@ -755,43 +776,48 @@ defmodule RDF.Description do
def equal?(_, _), do: false def equal?(_, _), do: false
defimpl Enumerable do defimpl Enumerable do
alias RDF.Description alias RDF.Description
def member?(desc, triple), do: {:ok, Description.include?(desc, triple)} def member?(desc, triple), do: {:ok, Description.include?(desc, triple)}
def count(desc), do: {:ok, Description.count(desc)} def count(desc), do: {:ok, Description.count(desc)}
def slice(_desc), do: {:error, __MODULE__} def slice(_desc), do: {:error, __MODULE__}
def reduce(%Description{predications: predications}, {:cont, acc}, _fun) def reduce(%Description{predications: predications}, {:cont, acc}, _fun)
when map_size(predications) == 0, do: {:done, acc} when map_size(predications) == 0,
do: {:done, acc}
def reduce(description = %Description{}, {:cont, acc}, fun) do def reduce(description = %Description{}, {:cont, acc}, fun) do
{triple, rest} = Description.pop(description) {triple, rest} = Description.pop(description)
reduce(rest, fun.(triple, acc), fun) reduce(rest, fun.(triple, acc), fun)
end end
def reduce(_, {:halt, acc}, _fun), do: {:halted, acc} def reduce(_, {:halt, acc}, _fun), do: {:halted, acc}
def reduce(description = %Description{}, {:suspend, acc}, fun) do def reduce(description = %Description{}, {:suspend, acc}, fun) do
{:suspended, acc, &reduce(description, &1, fun)} {:suspended, acc, &reduce(description, &1, fun)}
end end
end end
defimpl Collectable do defimpl Collectable do
alias RDF.Description alias RDF.Description
def into(original) do def into(original) do
collector_fun = fn collector_fun = fn
description, {:cont, list} when is_list(list) description, {:cont, list} when is_list(list) ->
-> Description.add(description, List.to_tuple(list)) Description.add(description, List.to_tuple(list))
description, {:cont, elem} -> Description.add(description, elem)
description, :done -> description description, {:cont, elem} ->
_description, :halt -> :ok Description.add(description, elem)
description, :done ->
description
_description, :halt ->
:ok
end end
{original, collector_fun} {original, collector_fun}
end end
end end
end end
View File
@ -9,13 +9,12 @@ defmodule RDF.Diff do
alias RDF.{Description, Graph} alias RDF.{Description, Graph}
@type t :: %__MODULE__{ @type t :: %__MODULE__{
additions: Graph.t, additions: Graph.t(),
deletions: Graph.t deletions: Graph.t()
} }
defstruct [:additions, :deletions] defstruct [:additions, :deletions]
@doc """ @doc """
Creates a `RDF.Diff` struct. Creates a `RDF.Diff` struct.
@ -32,8 +31,10 @@ defmodule RDF.Diff do
end end
defp coerce_graph(nil), do: Graph.new() defp coerce_graph(nil), do: Graph.new()
defp coerce_graph(%Description{} = description), defp coerce_graph(%Description{} = description),
do: if Enum.empty?(description), do: Graph.new(), else: Graph.new(description) do: if(Enum.empty?(description), do: Graph.new(), else: Graph.new(description))
defp coerce_graph(data), do: Graph.new(data) defp coerce_graph(data), do: Graph.new(data)
@doc """ @doc """
@ -59,43 +60,49 @@ defmodule RDF.Diff do
deletions: RDF.graph({EX.S1, EX.p1, EX.O1}) deletions: RDF.graph({EX.S1, EX.p1, EX.O1})
} }
""" """
@spec diff(Description.t | Graph.t, Description.t | Graph.t) :: t @spec diff(Description.t() | Graph.t(), Description.t() | Graph.t()) :: t
def diff(original_rdf_data, new_rdf_data) def diff(original_rdf_data, new_rdf_data)
def diff(%Description{} = description, description), do: new() def diff(%Description{} = description, description), do: new()
def diff(%Description{subject: subject} = original_description, def diff(
%Description{subject: subject} = new_description) do %Description{subject: subject} = original_description,
%Description{subject: subject} = new_description
) do
{additions, deletions} = {additions, deletions} =
original_description original_description
|> Description.predicates() |> Description.predicates()
|> Enum.reduce({new_description, Description.new(subject)}, |> Enum.reduce(
fn property, {additions, deletions} -> {new_description, Description.new(subject)},
original_objects = Description.get(original_description, property) fn property, {additions, deletions} ->
case Description.get(new_description, property) do original_objects = Description.get(original_description, property)
nil ->
{
additions,
Description.add(deletions, property, original_objects)
}
new_objects -> case Description.get(new_description, property) do
{unchanged_objects, deleted_objects} = nil ->
Enum.reduce(original_objects, {[], []}, fn {
original_object, {unchanged_objects, deleted_objects} -> additions,
if original_object in new_objects do Description.add(deletions, property, original_objects)
{[original_object | unchanged_objects], deleted_objects} }
else
{unchanged_objects, [original_object | deleted_objects]} new_objects ->
end {unchanged_objects, deleted_objects} =
end) Enum.reduce(original_objects, {[], []}, fn
original_object, {unchanged_objects, deleted_objects} ->
if original_object in new_objects do
{[original_object | unchanged_objects], deleted_objects}
else
{unchanged_objects, [original_object | deleted_objects]}
end
end)
{
Description.delete(additions, property, unchanged_objects),
Description.add(deletions, property, deleted_objects)
}
end
end
)
{
Description.delete(additions, property, unchanged_objects),
Description.add(deletions, property, deleted_objects),
}
end
end)
new(additions: additions, deletions: deletions) new(additions: additions, deletions: deletions)
end end
@ -111,16 +118,20 @@ defmodule RDF.Diff do
graph1_subjects graph1_subjects
|> MapSet.intersection(graph2_subjects) |> MapSet.intersection(graph2_subjects)
|> Enum.reduce( |> Enum.reduce(
new( new(
additions: Graph.take(graph2, added_subjects), additions: Graph.take(graph2, added_subjects),
deletions: Graph.take(graph1, deleted_subjects) deletions: Graph.take(graph1, deleted_subjects)
), ),
fn subject, diff -> fn subject, diff ->
merge(diff, diff( merge(
Graph.description(graph1, subject), diff,
Graph.description(graph2, subject) diff(
)) Graph.description(graph1, subject),
end) Graph.description(graph2, subject)
)
)
end
)
end end
def diff(%Description{} = description, %Graph{} = graph) do def diff(%Description{} = description, %Graph{} = graph) do
@ -139,10 +150,7 @@ defmodule RDF.Diff do
def diff(%Graph{} = graph, %Description{} = description) do def diff(%Graph{} = graph, %Description{} = description) do
diff = diff(description, graph) diff = diff(description, graph)
%__MODULE__{ diff | %__MODULE__{diff | additions: diff.deletions, deletions: diff.additions}
additions: diff.deletions,
deletions: diff.additions
}
end end
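A minimal round-trip sketch of RDF.Diff.diff/2 and RDF.Diff.apply/2, using placeholder example.com IRIs:

s = RDF.iri("http://example.com/S")
p = RDF.iri("http://example.com/p")

original = RDF.Description.new(s, p, "Foo")
changed = RDF.Description.new(s, p, "Bar")

diff = RDF.Diff.diff(original, changed)
# diff.additions and diff.deletions are RDF.Graphs with the added and removed statements

RDF.Diff.apply(diff, original)
# => an RDF.Graph containing the statements of the changed description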
@doc """ @doc """
@ -178,7 +186,7 @@ defmodule RDF.Diff do
The result of an application is always a `RDF.Graph`, even if a `RDF.Description` The result of an application is always a `RDF.Graph`, even if a `RDF.Description`
is given and the additions from the diff are all about the subject of this description. is given and the additions from the diff are all about the subject of this description.
""" """
@spec apply(t, Description.t | Graph.t) :: Graph.t @spec apply(t, Description.t() | Graph.t()) :: Graph.t()
def apply(diff, rdf_data) def apply(diff, rdf_data)
def apply(%__MODULE__{} = diff, %Graph{} = graph) do def apply(%__MODULE__{} = diff, %Graph{} = graph) do
View File
@ -38,7 +38,6 @@ defmodule RDF.Quad.InvalidGraphContextError do
end end
end end
defmodule RDF.Namespace.InvalidVocabBaseIRIError do defmodule RDF.Namespace.InvalidVocabBaseIRIError do
defexception [:message] defexception [:message]
end end
@ -55,7 +54,6 @@ defmodule RDF.Namespace.UndefinedTermError do
defexception [:message] defexception [:message]
end end
defmodule RDF.Query.InvalidError do defmodule RDF.Query.InvalidError do
defexception [:message] defexception [:message]
end end
View File
@ -16,24 +16,23 @@ defmodule RDF.Graph do
import RDF.Statement import RDF.Statement
alias RDF.{Description, IRI, PrefixMap, Statement} alias RDF.{Description, IRI, PrefixMap, Statement}
@type graph_description :: %{Statement.subject => Description.t} @type graph_description :: %{Statement.subject() => Description.t()}
@type t :: %__MODULE__{ @type t :: %__MODULE__{
name: IRI.t | nil, name: IRI.t() | nil,
descriptions: graph_description, descriptions: graph_description,
prefixes: PrefixMap.t | nil, prefixes: PrefixMap.t() | nil,
base_iri: IRI.t | nil base_iri: IRI.t() | nil
} }
@type input :: Statement.t | Description.t | t @type input :: Statement.t() | Description.t() | t
@type update_description_fun :: (Description.t -> Description.t) @type update_description_fun :: (Description.t() -> Description.t())
@type get_and_update_description_fun :: (Description.t -> {Description.t, input} | :pop) @type get_and_update_description_fun :: (Description.t() -> {Description.t(), input} | :pop)
defstruct name: nil, descriptions: %{}, prefixes: nil, base_iri: nil defstruct name: nil, descriptions: %{}, prefixes: nil, base_iri: nil
@doc """ @doc """
Creates an empty unnamed `RDF.Graph`. Creates an empty unnamed `RDF.Graph`.
""" """
@ -119,15 +118,14 @@ defmodule RDF.Graph do
See `new/2` for available arguments. See `new/2` for available arguments.
""" """
@spec new( @spec new(
Statement.coercible_subject, Statement.coercible_subject(),
Statement.coercible_predicate, Statement.coercible_predicate(),
Statement.coercible_object | [Statement.coercible_object], Statement.coercible_object() | [Statement.coercible_object()],
keyword keyword
) :: t ) :: t
def new(subject, predicate, objects, options \\ []), def new(subject, predicate, objects, options \\ []),
do: new([], options) |> add(subject, predicate, objects) do: new([], options) |> add(subject, predicate, objects)
@doc """ @doc """
Removes all triples from `graph`. Removes all triples from `graph`.
@ -140,15 +138,14 @@ defmodule RDF.Graph do
%__MODULE__{graph | descriptions: %{}} %__MODULE__{graph | descriptions: %{}}
end end
@doc """ @doc """
Adds triples to a `RDF.Graph`. Adds triples to a `RDF.Graph`.
""" """
@spec add( @spec add(
t, t,
Statement.coercible_subject, Statement.coercible_subject(),
Statement.coercible_predicate, Statement.coercible_predicate(),
Statement.coercible_object | [Statement.coercible_object] Statement.coercible_object() | [Statement.coercible_object()]
) :: t ) :: t
def add(%__MODULE__{} = graph, subject, predicate, objects), def add(%__MODULE__{} = graph, subject, predicate, objects),
do: add(graph, {subject, predicate, objects}) do: add(graph, {subject, predicate, objects})
@ -175,9 +172,9 @@ defmodule RDF.Graph do
do: add(graph, {subject, predicate, object}) do: add(graph, {subject, predicate, object})
def add(graph, triples) when is_list(triples) do def add(graph, triples) when is_list(triples) do
Enum.reduce triples, graph, fn (triple, graph) -> Enum.reduce(triples, graph, fn triple, graph ->
add(graph, triple) add(graph, triple)
end end)
end end
def add(%__MODULE__{} = graph, %Description{subject: subject} = description), def add(%__MODULE__{} = graph, %Description{subject: subject} = description),
@ -185,9 +182,9 @@ defmodule RDF.Graph do
def add(graph, %__MODULE__{descriptions: descriptions, prefixes: prefixes}) do def add(graph, %__MODULE__{descriptions: descriptions, prefixes: prefixes}) do
graph = graph =
Enum.reduce descriptions, graph, fn ({_, description}, graph) -> Enum.reduce(descriptions, graph, fn {_, description}, graph ->
add(graph, description) add(graph, description)
end end)
if prefixes do if prefixes do
add_prefixes(graph, prefixes, fn _, ns, _ -> ns end) add_prefixes(graph, prefixes, fn _, ns, _ -> ns end)
@ -197,16 +194,15 @@ defmodule RDF.Graph do
end end
defp do_add(%__MODULE__{descriptions: descriptions} = graph, subject, statements) do defp do_add(%__MODULE__{descriptions: descriptions} = graph, subject, statements) do
%__MODULE__{graph | %__MODULE__{
descriptions: graph
Map.update(descriptions, subject, Description.new(statements), | descriptions:
fn description -> Map.update(descriptions, subject, Description.new(statements), fn description ->
Description.add(description, statements) Description.add(description, statements)
end) end)
} }
end end
@doc """ @doc """
Adds statements to a `RDF.Graph` and overwrites all existing statements with the same subjects and predicates. Adds statements to a `RDF.Graph` and overwrites all existing statements with the same subjects and predicates.
@ -235,9 +231,9 @@ defmodule RDF.Graph do
def put(graph, %__MODULE__{descriptions: descriptions, prefixes: prefixes}) do def put(graph, %__MODULE__{descriptions: descriptions, prefixes: prefixes}) do
graph = graph =
Enum.reduce descriptions, graph, fn ({_, description}, graph) -> Enum.reduce(descriptions, graph, fn {_, description}, graph ->
put(graph, description) put(graph, description)
end end)
if prefixes do if prefixes do
add_prefixes(graph, prefixes, fn _, ns, _ -> ns end) add_prefixes(graph, prefixes, fn _, ns, _ -> ns end)
@ -247,31 +243,40 @@ defmodule RDF.Graph do
end end
def put(%__MODULE__{} = graph, statements) when is_map(statements) do def put(%__MODULE__{} = graph, statements) when is_map(statements) do
Enum.reduce statements, graph, fn ({subject, predications}, graph) -> Enum.reduce(statements, graph, fn {subject, predications}, graph ->
put(graph, subject, predications) put(graph, subject, predications)
end end)
end end
def put(%__MODULE__{} = graph, statements) when is_list(statements) do def put(%__MODULE__{} = graph, statements) when is_list(statements) do
put(graph, Enum.group_by(statements, &(elem(&1, 0)), fn {_, p, o} -> {p, o} end)) put(graph, Enum.group_by(statements, &elem(&1, 0), fn {_, p, o} -> {p, o} end))
end end
@doc """ @doc """
Add statements to a `RDF.Graph`, overwriting all statements with the same subject and predicate. Add statements to a `RDF.Graph`, overwriting all statements with the same subject and predicate.
""" """
@spec put(t, Statement.coercible_subject, Description.statements | [Description.statements]) :: t @spec put(
t,
Statement.coercible_subject(),
Description.statements() | [Description.statements()]
) :: t
def put(graph, subject, predications) def put(graph, subject, predications)
def put(%__MODULE__{descriptions: descriptions} = graph, subject, predications) def put(%__MODULE__{descriptions: descriptions} = graph, subject, predications)
when is_list(predications) do when is_list(predications) do
with subject = coerce_subject(subject) do with subject = coerce_subject(subject) do
# TODO: Can we reduce this case also to do_put somehow? Only the initializer of Map.update differs ... # TODO: Can we reduce this case also to do_put somehow? Only the initializer of Map.update differs ...
%__MODULE__{graph | %__MODULE__{
descriptions: graph
Map.update(descriptions, subject, Description.new(subject, predications), | descriptions:
fn current -> Map.update(
Description.put(current, predications) descriptions,
end) subject,
Description.new(subject, predications),
fn current ->
Description.put(current, predications)
end
)
} }
end end
end end
@ -280,10 +285,10 @@ defmodule RDF.Graph do
do: put(graph, subject, [predications]) do: put(graph, subject, [predications])
defp do_put(%__MODULE__{descriptions: descriptions} = graph, subject, statements) do defp do_put(%__MODULE__{descriptions: descriptions} = graph, subject, statements) do
%__MODULE__{graph | %__MODULE__{
descriptions: graph
Map.update(descriptions, subject, Description.new(statements), | descriptions:
fn current -> Map.update(descriptions, subject, Description.new(statements), fn current ->
Description.put(current, statements) Description.put(current, statements)
end) end)
} }
@ -302,22 +307,21 @@ defmodule RDF.Graph do
""" """
@spec put( @spec put(
t, t,
Statement.coercible_subject, Statement.coercible_subject(),
Statement.coercible_predicate, Statement.coercible_predicate(),
Statement.coercible_object | [Statement.coercible_object] Statement.coercible_object() | [Statement.coercible_object()]
) :: t ) :: t
def put(%__MODULE__{} = graph, subject, predicate, objects), def put(%__MODULE__{} = graph, subject, predicate, objects),
do: put(graph, {subject, predicate, objects}) do: put(graph, {subject, predicate, objects})
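A minimal sketch contrasting add and put on a graph; the example.com IRIs are placeholders, and the triple form of put is assumed to behave as the docs above describe (overwriting statements with the same subject and predicate):

s = RDF.iri("http://example.com/S")
p = RDF.iri("http://example.com/p")

RDF.Graph.new()
|> RDF.Graph.add(s, p, "Foo")
|> RDF.Graph.put(s, p, "Bar")
|> RDF.Graph.triples()
# => a single triple for s and p, the object added first having been overwritten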
@doc """ @doc """
Deletes statements from a `RDF.Graph`. Deletes statements from a `RDF.Graph`.
""" """
@spec delete( @spec delete(
t, t,
Statement.coercible_subject, Statement.coercible_subject(),
Statement.coercible_predicate, Statement.coercible_predicate(),
Statement.coercible_object | [Statement.coercible_object] Statement.coercible_object() | [Statement.coercible_object()]
) :: t ) :: t
def delete(graph, subject, predicate, object), def delete(graph, subject, predicate, object),
do: delete(graph, {subject, predicate, object}) do: delete(graph, {subject, predicate, object})
@ -341,52 +345,50 @@ defmodule RDF.Graph do
do: delete(graph, {subject, predicate, object}) do: delete(graph, {subject, predicate, object})
def delete(%__MODULE__{} = graph, triples) when is_list(triples) do def delete(%__MODULE__{} = graph, triples) when is_list(triples) do
Enum.reduce triples, graph, fn (triple, graph) -> Enum.reduce(triples, graph, fn triple, graph ->
delete(graph, triple) delete(graph, triple)
end end)
end end
def delete(%__MODULE__{} = graph, %Description{subject: subject} = description), def delete(%__MODULE__{} = graph, %Description{subject: subject} = description),
do: do_delete(graph, subject, description) do: do_delete(graph, subject, description)
def delete(%__MODULE__{} = graph, %__MODULE__{descriptions: descriptions}) do def delete(%__MODULE__{} = graph, %__MODULE__{descriptions: descriptions}) do
Enum.reduce descriptions, graph, fn ({_, description}, graph) -> Enum.reduce(descriptions, graph, fn {_, description}, graph ->
delete(graph, description) delete(graph, description)
end end)
end end
defp do_delete(%__MODULE__{descriptions: descriptions} = graph, defp do_delete(%__MODULE__{descriptions: descriptions} = graph, subject, statements) do
subject, statements) do
with description when not is_nil(description) <- descriptions[subject], with description when not is_nil(description) <- descriptions[subject],
new_description = Description.delete(description, statements) new_description = Description.delete(description, statements) do
do %__MODULE__{
%__MODULE__{graph | graph
descriptions: | descriptions:
if Enum.empty?(new_description) do if Enum.empty?(new_description) do
Map.delete(descriptions, subject) Map.delete(descriptions, subject)
else else
Map.put(descriptions, subject, new_description) Map.put(descriptions, subject, new_description)
end end
} }
else else
nil -> graph nil -> graph
end end
end end
@doc """ @doc """
Deletes all statements with the given subjects. Deletes all statements with the given subjects.
""" """
@spec delete_subjects( @spec delete_subjects(
t, t,
Statement.coercible_subject | [Statement.coercible_subject] Statement.coercible_subject() | [Statement.coercible_subject()]
) :: t ) :: t
def delete_subjects(graph, subjects) def delete_subjects(graph, subjects)
def delete_subjects(%__MODULE__{} = graph, subjects) when is_list(subjects) do def delete_subjects(%__MODULE__{} = graph, subjects) when is_list(subjects) do
Enum.reduce subjects, graph, fn (subject, graph) -> Enum.reduce(subjects, graph, fn subject, graph ->
delete_subjects(graph, subject) delete_subjects(graph, subject)
end end)
end end
def delete_subjects(%__MODULE__{descriptions: descriptions} = graph, subject) do def delete_subjects(%__MODULE__{descriptions: descriptions} = graph, subject) do
@ -395,7 +397,6 @@ defmodule RDF.Graph do
end end
end end
@doc """ @doc """
Updates the description of the `subject` in `graph` with the given function. Updates the description of the `subject` in `graph` with the given function.
@ -428,8 +429,8 @@ defmodule RDF.Graph do
""" """
@spec update( @spec update(
t, t,
Statement.coercible_subject, Statement.coercible_subject(),
Description.statements | [Description.statements] | nil, Description.statements() | [Description.statements()] | nil,
update_description_fun update_description_fun
) :: t ) :: t
def update(graph = %__MODULE__{}, subject, initial \\ nil, fun) do def update(graph = %__MODULE__{}, subject, initial \\ nil, fun) do
@ -447,18 +448,17 @@ defmodule RDF.Graph do
description description
|> fun.() |> fun.()
|> case do |> case do
nil -> nil ->
delete_subjects(graph, subject) delete_subjects(graph, subject)
new_description -> new_description ->
graph graph
|> delete_subjects(subject) |> delete_subjects(subject)
|> add(Description.new(subject, new_description)) |> add(Description.new(subject, new_description))
end end
end end
end end
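A minimal sketch of RDF.Graph.update/4 (placeholder example.com IRIs); the given function receives the subject's current RDF.Description, and returning nil removes the subject:

s = RDF.iri("http://example.com/S")
p = RDF.iri("http://example.com/p")

RDF.Graph.new(s, p, "Foo")
|> RDF.Graph.update(s, fn description ->
  RDF.Description.add(description, p, "Bar")
end)
|> RDF.Graph.triple_count()
# => 2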
@doc """ @doc """
Fetches the description of the given subject. Fetches the description of the given subject.
@ -474,7 +474,7 @@ defmodule RDF.Graph do
""" """
@impl Access @impl Access
@spec fetch(t, Statement.coercible_subject) :: {:ok, Description.t} | :error @spec fetch(t, Statement.coercible_subject()) :: {:ok, Description.t()} | :error
def fetch(%__MODULE__{descriptions: descriptions}, subject) do def fetch(%__MODULE__{descriptions: descriptions}, subject) do
Access.fetch(descriptions, coerce_subject(subject)) Access.fetch(descriptions, coerce_subject(subject))
end end
@ -519,29 +519,28 @@ defmodule RDF.Graph do
:bar :bar
""" """
@spec get(t, Statement.coercible_subject, Description.t | nil) :: Description.t | nil @spec get(t, Statement.coercible_subject(), Description.t() | nil) :: Description.t() | nil
def get(%__MODULE__{} = graph, subject, default \\ nil) do def get(%__MODULE__{} = graph, subject, default \\ nil) do
case fetch(graph, subject) do case fetch(graph, subject) do
{:ok, value} -> value {:ok, value} -> value
:error -> default :error -> default
end end
end end
@doc """ @doc """
The `RDF.Description` of the given subject. The `RDF.Description` of the given subject.
""" """
@spec description(t, Statement.coercible_subject) :: Description.t | nil @spec description(t, Statement.coercible_subject()) :: Description.t() | nil
def description(%__MODULE__{descriptions: descriptions}, subject), def description(%__MODULE__{descriptions: descriptions}, subject),
do: Map.get(descriptions, coerce_subject(subject)) do: Map.get(descriptions, coerce_subject(subject))
@doc """ @doc """
All `RDF.Description`s within a `RDF.Graph`. All `RDF.Description`s within a `RDF.Graph`.
""" """
@spec descriptions(t) :: [Description.t] @spec descriptions(t) :: [Description.t()]
def descriptions(%__MODULE__{descriptions: descriptions}), def descriptions(%__MODULE__{descriptions: descriptions}),
do: Map.values(descriptions) do: Map.values(descriptions)
@doc """ @doc """
Gets and updates the description of the given subject, in a single pass. Gets and updates the description of the given subject, in a single pass.
@ -566,38 +565,44 @@ defmodule RDF.Graph do
""" """
@impl Access @impl Access
@spec get_and_update(t, Statement.coercible_subject, get_and_update_description_fun) :: @spec get_and_update(t, Statement.coercible_subject(), get_and_update_description_fun) ::
{Description.t, input} {Description.t(), input}
def get_and_update(%__MODULE__{} = graph, subject, fun) do def get_and_update(%__MODULE__{} = graph, subject, fun) do
with subject = coerce_subject(subject) do with subject = coerce_subject(subject) do
case fun.(get(graph, subject)) do case fun.(get(graph, subject)) do
{old_description, new_description} -> {old_description, new_description} ->
{old_description, put(graph, subject, new_description)} {old_description, put(graph, subject, new_description)}
:pop -> :pop ->
pop(graph, subject) pop(graph, subject)
other -> other ->
raise "the given function must return a two-element tuple or :pop, got: #{inspect(other)}" raise "the given function must return a two-element tuple or :pop, got: #{
inspect(other)
}"
end end
end end
end end
@doc """ @doc """
Pops an arbitrary triple from a `RDF.Graph`. Pops an arbitrary triple from a `RDF.Graph`.
""" """
@spec pop(t) :: {Statement.t | nil, t} @spec pop(t) :: {Statement.t() | nil, t}
def pop(graph) def pop(graph)
def pop(%__MODULE__{descriptions: descriptions} = graph) def pop(%__MODULE__{descriptions: descriptions} = graph)
when descriptions == %{}, do: {nil, graph} when descriptions == %{},
do: {nil, graph}
def pop(%__MODULE__{descriptions: descriptions} = graph) do def pop(%__MODULE__{descriptions: descriptions} = graph) do
# TODO: Find a faster way ... # TODO: Find a faster way ...
[{subject, description}] = Enum.take(descriptions, 1) [{subject, description}] = Enum.take(descriptions, 1)
{triple, popped_description} = Description.pop(description) {triple, popped_description} = Description.pop(description)
popped = if Enum.empty?(popped_description),
do: descriptions |> Map.delete(subject), popped =
else: descriptions |> Map.put(subject, popped_description) if Enum.empty?(popped_description),
do: descriptions |> Map.delete(subject),
else: descriptions |> Map.put(subject, popped_description)
{triple, %__MODULE__{graph | descriptions: popped}} {triple, %__MODULE__{graph | descriptions: popped}}
end end
@ -617,17 +622,17 @@ defmodule RDF.Graph do
""" """
@impl Access @impl Access
@spec pop(t, Statement.coercible_subject) :: {Description.t | nil, t} @spec pop(t, Statement.coercible_subject()) :: {Description.t() | nil, t}
def pop(%__MODULE__{descriptions: descriptions} = graph, subject) do def pop(%__MODULE__{descriptions: descriptions} = graph, subject) do
case Access.pop(descriptions, coerce_subject(subject)) do case Access.pop(descriptions, coerce_subject(subject)) do
{nil, _} -> {nil, _} ->
{nil, graph} {nil, graph}
{description, new_descriptions} -> {description, new_descriptions} ->
{description, %__MODULE__{graph | descriptions: new_descriptions}} {description, %__MODULE__{graph | descriptions: new_descriptions}}
end end
end end
@doc """ @doc """
The number of subjects within a `RDF.Graph`. The number of subjects within a `RDF.Graph`.
@ -660,9 +665,9 @@ defmodule RDF.Graph do
""" """
@spec triple_count(t) :: non_neg_integer @spec triple_count(t) :: non_neg_integer
def triple_count(%__MODULE__{descriptions: descriptions}) do def triple_count(%__MODULE__{descriptions: descriptions}) do
Enum.reduce descriptions, 0, fn ({_subject, description}, count) -> Enum.reduce(descriptions, 0, fn {_subject, description}, count ->
count + Description.count(description) count + Description.count(description)
end end)
end end
@doc """ @doc """
@ -678,7 +683,7 @@ defmodule RDF.Graph do
MapSet.new([RDF.iri(EX.S1), RDF.iri(EX.S2)]) MapSet.new([RDF.iri(EX.S1), RDF.iri(EX.S2)])
""" """
def subjects(%__MODULE__{descriptions: descriptions}), def subjects(%__MODULE__{descriptions: descriptions}),
do: descriptions |> Map.keys |> MapSet.new do: descriptions |> Map.keys() |> MapSet.new()
@doc """ @doc """
The set of all properties used in the predicates of the statements within a `RDF.Graph`. The set of all properties used in the predicates of the statements within a `RDF.Graph`.
@ -693,11 +698,11 @@ defmodule RDF.Graph do
MapSet.new([EX.p1, EX.p2]) MapSet.new([EX.p1, EX.p2])
""" """
def predicates(%__MODULE__{descriptions: descriptions}) do def predicates(%__MODULE__{descriptions: descriptions}) do
Enum.reduce descriptions, MapSet.new, fn ({_, description}, acc) -> Enum.reduce(descriptions, MapSet.new(), fn {_, description}, acc ->
description description
|> Description.predicates |> Description.predicates()
|> MapSet.union(acc) |> MapSet.union(acc)
end end)
end end
@doc """ @doc """
@ -717,11 +722,11 @@ defmodule RDF.Graph do
MapSet.new([RDF.iri(EX.O1), RDF.iri(EX.O2), RDF.bnode(:bnode)]) MapSet.new([RDF.iri(EX.O1), RDF.iri(EX.O2), RDF.bnode(:bnode)])
""" """
def objects(%__MODULE__{descriptions: descriptions}) do def objects(%__MODULE__{descriptions: descriptions}) do
Enum.reduce descriptions, MapSet.new, fn ({_, description}, acc) -> Enum.reduce(descriptions, MapSet.new(), fn {_, description}, acc ->
description description
|> Description.objects |> Description.objects()
|> MapSet.union(acc) |> MapSet.union(acc)
end end)
end end
@doc """ @doc """
@ -739,11 +744,12 @@ defmodule RDF.Graph do
RDF.iri(EX.O1), RDF.iri(EX.O2), RDF.bnode(:bnode), EX.p1, EX.p2]) RDF.iri(EX.O1), RDF.iri(EX.O2), RDF.bnode(:bnode), EX.p1, EX.p2])
""" """
def resources(graph = %__MODULE__{descriptions: descriptions}) do def resources(graph = %__MODULE__{descriptions: descriptions}) do
Enum.reduce(descriptions, MapSet.new, fn ({_, description}, acc) -> Enum.reduce(descriptions, MapSet.new(), fn {_, description}, acc ->
description description
|> Description.resources |> Description.resources()
|> MapSet.union(acc) |> MapSet.union(acc)
end) |> MapSet.union(subjects(graph)) end)
|> MapSet.union(subjects(graph))
end end
@doc """ @doc """
@ -760,18 +766,19 @@ defmodule RDF.Graph do
{RDF.iri(EX.S1), RDF.iri(EX.p2), RDF.iri(EX.O3)}, {RDF.iri(EX.S1), RDF.iri(EX.p2), RDF.iri(EX.O3)},
{RDF.iri(EX.S2), RDF.iri(EX.p2), RDF.iri(EX.O2)}] {RDF.iri(EX.S2), RDF.iri(EX.p2), RDF.iri(EX.O2)}]
""" """
@spec triples(t) :: [Statement.t] @spec triples(t) :: [Statement.t()]
def triples(%__MODULE__{} = graph), do: Enum.to_list(graph) def triples(%__MODULE__{} = graph), do: Enum.to_list(graph)
defdelegate statements(graph), to: RDF.Graph, as: :triples defdelegate statements(graph), to: RDF.Graph, as: :triples
@doc """ @doc """
Checks if the given statement exists within a `RDF.Graph`. Checks if the given statement exists within a `RDF.Graph`.
""" """
@spec include?(t, Statement.t) :: boolean @spec include?(t, Statement.t()) :: boolean
def include?(%__MODULE__{descriptions: descriptions}, def include?(
triple = {subject, _, _}) do %__MODULE__{descriptions: descriptions},
triple = {subject, _, _}
) do
with subject = coerce_subject(subject), with subject = coerce_subject(subject),
%Description{} <- description = descriptions[subject] do %Description{} <- description = descriptions[subject] do
Description.include?(description, triple) Description.include?(description, triple)
@ -790,14 +797,13 @@ defmodule RDF.Graph do
iex> RDF.Graph.new([{EX.S1, EX.p1, EX.O1}]) |> RDF.Graph.describes?(EX.S2) iex> RDF.Graph.new([{EX.S1, EX.p1, EX.O1}]) |> RDF.Graph.describes?(EX.S2)
false false
""" """
@spec describes?(t, Statement.coercible_subject) :: boolean @spec describes?(t, Statement.coercible_subject()) :: boolean
def describes?(%__MODULE__{descriptions: descriptions}, subject) do def describes?(%__MODULE__{descriptions: descriptions}, subject) do
with subject = coerce_subject(subject) do with subject = coerce_subject(subject) do
Map.has_key?(descriptions, subject) Map.has_key?(descriptions, subject)
end end
end end
@doc """ @doc """
Returns a nested map of the native Elixir values of a `RDF.Graph`. Returns a nested map of the native Elixir values of a `RDF.Graph`.
@ -840,13 +846,13 @@ defmodule RDF.Graph do
} }
""" """
@spec values(t, Statement.term_mapping) :: map @spec values(t, Statement.term_mapping()) :: map
def values(graph, mapping \\ &RDF.Statement.default_term_mapping/1) def values(graph, mapping \\ &RDF.Statement.default_term_mapping/1)
def values(%__MODULE__{descriptions: descriptions}, mapping) do def values(%__MODULE__{descriptions: descriptions}, mapping) do
Map.new descriptions, fn {subject, description} -> Map.new(descriptions, fn {subject, description} ->
{mapping.({:subject, subject}), Description.values(description, mapping)} {mapping.({:subject, subject}), Description.values(description, mapping)}
end end)
end end
@doc """ @doc """
@ -858,26 +864,32 @@ defmodule RDF.Graph do
If `nil` is passed as the `subjects`, the subjects will not be limited. If `nil` is passed as the `subjects`, the subjects will not be limited.
""" """
@spec take(t, [Statement.coercible_subject] | Enum.t | nil, [Statement.coercible_predicate] | Enum.t | nil) :: t @spec take(
t,
[Statement.coercible_subject()] | Enum.t() | nil,
[Statement.coercible_predicate()] | Enum.t() | nil
) :: t
def take(graph, subjects, properties \\ nil) def take(graph, subjects, properties \\ nil)
def take(%__MODULE__{} = graph, nil, nil), do: graph def take(%__MODULE__{} = graph, nil, nil), do: graph
def take(%__MODULE__{descriptions: descriptions} = graph, subjects, nil) do def take(%__MODULE__{descriptions: descriptions} = graph, subjects, nil) do
subjects = Enum.map(subjects, &(coerce_subject/1)) subjects = Enum.map(subjects, &coerce_subject/1)
%__MODULE__{graph | descriptions: Map.take(descriptions, subjects)} %__MODULE__{graph | descriptions: Map.take(descriptions, subjects)}
end end
def take(%__MODULE__{} = graph, subjects, properties) do def take(%__MODULE__{} = graph, subjects, properties) do
graph = take(graph, subjects, nil) graph = take(graph, subjects, nil)
%__MODULE__{graph |
descriptions: Map.new(graph.descriptions, fn {subject, description} -> %__MODULE__{
{subject, Description.take(description, properties)} graph
end) | descriptions:
Map.new(graph.descriptions, fn {subject, description} ->
{subject, Description.take(description, properties)}
end)
} }
end end
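A brief usage sketch of `take/3` with assumed example terms:

    graph = RDF.Graph.new([{EX.S1, EX.p1, EX.O1}, {EX.S2, EX.p2, EX.O2}])

    RDF.Graph.take(graph, [EX.S1])          # keeps only the description of EX.S1
    RDF.Graph.take(graph, [EX.S1], [EX.p1]) # additionally restricts it to EX.p1 statements
    RDF.Graph.take(graph, nil, [EX.p1])     # nil leaves the subjects unlimited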
@doc """ @doc """
Checks if two `RDF.Graph`s are equal. Checks if two `RDF.Graph`s are equal.
@ -893,7 +905,6 @@ defmodule RDF.Graph do
def equal?(_, _), do: false def equal?(_, _), do: false
@doc """ @doc """
Adds `prefixes` to the given `graph`. Adds `prefixes` to the given `graph`.
@ -906,8 +917,8 @@ defmodule RDF.Graph do
""" """
@spec add_prefixes( @spec add_prefixes(
t, t,
PrefixMap.t | map | keyword | nil, PrefixMap.t() | map | keyword | nil,
PrefixMap.conflict_resolver | nil PrefixMap.conflict_resolver() | nil
) :: t ) :: t
def add_prefixes(graph, prefixes, conflict_resolver \\ nil) def add_prefixes(graph, prefixes, conflict_resolver \\ nil)
@ -922,9 +933,7 @@ defmodule RDF.Graph do
end end
def add_prefixes(%__MODULE__{prefixes: prefixes} = graph, additions, conflict_resolver) do def add_prefixes(%__MODULE__{prefixes: prefixes} = graph, additions, conflict_resolver) do
%__MODULE__{graph | %__MODULE__{graph | prefixes: RDF.PrefixMap.merge!(prefixes, additions, conflict_resolver)}
prefixes: RDF.PrefixMap.merge!(prefixes, additions, conflict_resolver)
}
end end
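A minimal sketch of `add_prefixes/3` (illustrative; the conflict resolver is assumed to take the prefix and both conflicting namespaces, mirroring `RDF.PrefixMap.merge!/3`):

    graph
    |> RDF.Graph.add_prefixes(ex: EX)
    |> RDF.Graph.add_prefixes(%{ex: EX}, fn _prefix, old, _new -> old end)  # keep the existing mapping on conflict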
@doc """ @doc """
@ -933,7 +942,7 @@ defmodule RDF.Graph do
The `prefixes` can be a single prefix or a list of prefixes. The `prefixes` can be a single prefix or a list of prefixes.
Prefixes not in prefixes of the graph are simply ignored. Prefixes not in prefixes of the graph are simply ignored.
""" """
@spec delete_prefixes(t, PrefixMap.t) :: t @spec delete_prefixes(t, PrefixMap.t()) :: t
def delete_prefixes(graph, prefixes) def delete_prefixes(graph, prefixes)
def delete_prefixes(%__MODULE__{prefixes: nil} = graph, _), do: graph def delete_prefixes(%__MODULE__{prefixes: nil} = graph, _), do: graph
@ -955,7 +964,7 @@ defmodule RDF.Graph do
The `base_iri` can be given as anything accepted by `RDF.IRI.coerce_base/1`. The `base_iri` can be given as anything accepted by `RDF.IRI.coerce_base/1`.
""" """
@spec set_base_iri(t, IRI.t | nil) :: t @spec set_base_iri(t, IRI.t() | nil) :: t
def set_base_iri(graph, base_iri) def set_base_iri(graph, base_iri)
def set_base_iri(%__MODULE__{} = graph, nil) do def set_base_iri(%__MODULE__{} = graph, nil) do
@ -984,16 +993,16 @@ defmodule RDF.Graph do
|> clear_prefixes() |> clear_prefixes()
end end
defimpl Enumerable do defimpl Enumerable do
alias RDF.Graph alias RDF.Graph
def member?(graph, triple), do: {:ok, Graph.include?(graph, triple)} def member?(graph, triple), do: {:ok, Graph.include?(graph, triple)}
def count(graph), do: {:ok, Graph.triple_count(graph)} def count(graph), do: {:ok, Graph.triple_count(graph)}
def slice(_graph), do: {:error, __MODULE__} def slice(_graph), do: {:error, __MODULE__}
def reduce(%Graph{descriptions: descriptions}, {:cont, acc}, _fun) def reduce(%Graph{descriptions: descriptions}, {:cont, acc}, _fun)
when map_size(descriptions) == 0, do: {:done, acc} when map_size(descriptions) == 0,
do: {:done, acc}
def reduce(%Graph{} = graph, {:cont, acc}, fun) do def reduce(%Graph{} = graph, {:cont, acc}, fun) do
{triple, rest} = Graph.pop(graph) {triple, rest} = Graph.pop(graph)
@ -1001,6 +1010,7 @@ defmodule RDF.Graph do
end end
def reduce(_, {:halt, acc}, _fun), do: {:halted, acc} def reduce(_, {:halt, acc}, _fun), do: {:halted, acc}
def reduce(%Graph{} = graph, {:suspend, acc}, fun) do def reduce(%Graph{} = graph, {:suspend, acc}, fun) do
{:suspended, acc, &reduce(graph, &1, fun)} {:suspended, acc, &reduce(graph, &1, fun)}
end end
@ -1011,11 +1021,17 @@ defmodule RDF.Graph do
def into(original) do def into(original) do
collector_fun = fn collector_fun = fn
graph, {:cont, list} when is_list(list) graph, {:cont, list} when is_list(list) ->
-> Graph.add(graph, List.to_tuple(list)) Graph.add(graph, List.to_tuple(list))
graph, {:cont, elem} -> Graph.add(graph, elem)
graph, :done -> graph graph, {:cont, elem} ->
_graph, :halt -> :ok Graph.add(graph, elem)
graph, :done ->
graph
_graph, :halt ->
:ok
end end
{original, collector_fun} {original, collector_fun}
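The `Collectable` implementation above is what makes the usual `Enum.into/2` idiom work; a minimal sketch (both tuples and `[s, p, o]` lists are accepted, as the first collector clause shows):

    [{EX.S, EX.p, EX.O1}, {EX.S, EX.p, EX.O2}]
    |> Enum.into(RDF.Graph.new())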
@ -3,35 +3,34 @@ defmodule RDF.InspectHelper do
import Inspect.Algebra import Inspect.Algebra
def objects_doc(objects, opts) do def objects_doc(objects, opts) do
objects objects
|> Enum.map(fn {object, _} -> to_doc(object, opts) end) |> Enum.map(fn {object, _} -> to_doc(object, opts) end)
|> fold_doc(fn(object, acc) -> line(object, acc) end) |> fold_doc(fn object, acc -> line(object, acc) end)
end end
def predications_doc(predications, opts) do def predications_doc(predications, opts) do
predications predications
|> Enum.map(fn {predicate, objects} -> |> Enum.map(fn {predicate, objects} ->
to_doc(predicate, opts) to_doc(predicate, opts)
|> line(objects_doc(objects, opts)) |> line(objects_doc(objects, opts))
|> nest(4) |> nest(4)
end) end)
|> fold_doc(fn(predication, acc) -> |> fold_doc(fn predication, acc ->
line(predication, acc) line(predication, acc)
end) end)
end end
def descriptions_doc(descriptions, opts) do def descriptions_doc(descriptions, opts) do
descriptions descriptions
|> Enum.map(fn {subject, description} -> |> Enum.map(fn {subject, description} ->
to_doc(subject, opts) to_doc(subject, opts)
|> line(predications_doc(description.predications, opts)) |> line(predications_doc(description.predications, opts))
|> nest(4) |> nest(4)
end) end)
|> fold_doc(fn(predication, acc) -> |> fold_doc(fn predication, acc ->
line(predication, acc) line(predication, acc)
end) end)
end end
def surround_doc(left, doc, right) do def surround_doc(left, doc, right) do
@ -53,7 +52,7 @@ end
defimpl Inspect, for: RDF.Literal do defimpl Inspect, for: RDF.Literal do
def inspect(literal, _opts) do def inspect(literal, _opts) do
"%RDF.Literal{literal: #{inspect literal.literal}, valid: #{RDF.Literal.valid?(literal)}}" "%RDF.Literal{literal: #{inspect(literal.literal)}, valid: #{RDF.Literal.valid?(literal)}}"
end end
end end
@ -66,6 +65,7 @@ defimpl Inspect, for: RDF.Description do
space("subject:", to_doc(subject, opts)) space("subject:", to_doc(subject, opts))
|> line(predications_doc(predications, opts)) |> line(predications_doc(predications, opts))
|> nest(4) |> nest(4)
surround_doc("#RDF.Description{", doc, "}") surround_doc("#RDF.Description{", doc, "}")
end end
end end
@ -79,6 +79,7 @@ defimpl Inspect, for: RDF.Graph do
space("name:", to_doc(name, opts)) space("name:", to_doc(name, opts))
|> line(descriptions_doc(descriptions, opts)) |> line(descriptions_doc(descriptions, opts))
|> nest(4) |> nest(4)
surround_doc("#RDF.Graph{", doc, "}") surround_doc("#RDF.Graph{", doc, "}")
end end
end end
@ -92,12 +93,13 @@ defimpl Inspect, for: RDF.Dataset do
space("name:", to_doc(name, opts)) space("name:", to_doc(name, opts))
|> line(graphs_doc(RDF.Dataset.graphs(dataset), opts)) |> line(graphs_doc(RDF.Dataset.graphs(dataset), opts))
|> nest(4) |> nest(4)
surround_doc("#RDF.Dataset{", doc, "}") surround_doc("#RDF.Dataset{", doc, "}")
end end
defp graphs_doc(graphs, opts) do defp graphs_doc(graphs, opts) do
graphs graphs
|> Enum.map(fn graph -> to_doc(graph, opts) end) |> Enum.map(fn graph -> to_doc(graph, opts) end)
|> fold_doc(fn(graph, acc) -> line(graph, acc) end) |> fold_doc(fn graph, acc -> line(graph, acc) end)
end end
end end
@ -20,10 +20,10 @@ defmodule RDF.IRI do
import RDF.Guards import RDF.Guards
@type t :: %__MODULE__{ @type t :: %__MODULE__{
value: String.t value: String.t()
} }
@type coercible :: String.t | URI.t | module | t @type coercible :: String.t() | URI.t() | module | t
@enforce_keys [:value] @enforce_keys [:value]
defstruct [:value] defstruct [:value]
@ -44,16 +44,15 @@ defmodule RDF.IRI do
@default_base Application.get_env(:rdf, :default_base_iri) @default_base Application.get_env(:rdf, :default_base_iri)
def default_base, do: @default_base def default_base, do: @default_base
@doc """ @doc """
Creates a `RDF.IRI`. Creates a `RDF.IRI`.
""" """
@spec new(coercible) :: t @spec new(coercible) :: t
def new(iri) def new(iri)
def new(iri) when is_binary(iri), do: %__MODULE__{value: iri} def new(iri) when is_binary(iri), do: %__MODULE__{value: iri}
def new(qname) when maybe_ns_term(qname), do: Namespace.resolve_term!(qname) def new(qname) when maybe_ns_term(qname), do: Namespace.resolve_term!(qname)
def new(%URI{} = uri), do: uri |> URI.to_string |> new def new(%URI{} = uri), do: uri |> URI.to_string() |> new
def new(%__MODULE__{} = iri), do: iri def new(%__MODULE__{} = iri), do: iri
@doc """ @doc """
Creates a `RDF.IRI`, but checks if the given IRI is valid. Creates a `RDF.IRI`, but checks if the given IRI is valid.
@ -64,11 +63,11 @@ defmodule RDF.IRI do
""" """
@spec new!(coercible) :: t @spec new!(coercible) :: t
def new!(iri) def new!(iri)
def new!(iri) when is_binary(iri), do: iri |> valid!() |> new() def new!(iri) when is_binary(iri), do: iri |> valid!() |> new()
def new!(qname) when maybe_ns_term(qname), do: new(qname) # since terms of a namespace are already validated # since terms of a namespace are already validated
def new!(%URI{} = uri), do: uri |> valid!() |> new() def new!(qname) when maybe_ns_term(qname), do: new(qname)
def new!(%__MODULE__{} = iri), do: valid!(iri) def new!(%URI{} = uri), do: uri |> valid!() |> new()
def new!(%__MODULE__{} = iri), do: valid!(iri)
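For context, a short sketch of the constructors above (illustrative values; `EX` as in the doctests):

    RDF.IRI.new("http://example.com/foo")   # always wraps the string
    RDF.IRI.new!("http://example.com/foo")  # same, but validated
    RDF.IRI.new!("no scheme")               # raises RDF.IRI.InvalidError (relative, hence invalid)
    RDF.IRI.new(EX.Foo)                     # namespace terms resolve to their IRI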
@doc """ @doc """
Coerces an IRI serving as a base IRI. Coerces an IRI serving as a base IRI.
@ -87,8 +86,8 @@ defmodule RDF.IRI do
new(module) new(module)
end end
end end
def coerce_base(base_iri), do: new(base_iri)
def coerce_base(base_iri), do: new(base_iri)
@doc """ @doc """
Returns the given value unchanged if it's a valid IRI, otherwise raises an exception. Returns the given value unchanged if it's a valid IRI, otherwise raises an exception.
@ -104,11 +103,10 @@ defmodule RDF.IRI do
""" """
@spec valid!(coercible) :: coercible @spec valid!(coercible) :: coercible
def valid!(iri) do def valid!(iri) do
if not valid?(iri), do: raise RDF.IRI.InvalidError, "Invalid IRI: #{inspect iri}" if not valid?(iri), do: raise(RDF.IRI.InvalidError, "Invalid IRI: #{inspect(iri)}")
iri iri
end end
@doc """ @doc """
Checks if the given IRI is valid. Checks if the given IRI is valid.
@ -122,8 +120,8 @@ defmodule RDF.IRI do
false false
""" """
@spec valid?(coercible) :: boolean @spec valid?(coercible) :: boolean
def valid?(iri), do: absolute?(iri) # TODO: Provide a more elaborate validation # TODO: Provide a more elaborate validation
def valid?(iri), do: absolute?(iri)
@doc """ @doc """
Checks if the given value is an absolute IRI. Checks if the given value is an absolute IRI.
@ -135,17 +133,18 @@ defmodule RDF.IRI do
def absolute?(iri) def absolute?(iri)
def absolute?(value) when is_binary(value), do: not is_nil(scheme(value)) def absolute?(value) when is_binary(value), do: not is_nil(scheme(value))
def absolute?(%__MODULE__{value: value}), do: absolute?(value) def absolute?(%__MODULE__{value: value}), do: absolute?(value)
def absolute?(%URI{scheme: nil}), do: false def absolute?(%URI{scheme: nil}), do: false
def absolute?(%URI{scheme: _}), do: true def absolute?(%URI{scheme: _}), do: true
def absolute?(qname) when maybe_ns_term(qname) do def absolute?(qname) when maybe_ns_term(qname) do
case Namespace.resolve_term(qname) do case Namespace.resolve_term(qname) do
{:ok, iri} -> absolute?(iri) {:ok, iri} -> absolute?(iri)
_ -> false _ -> false
end end
end end
def absolute?(_), do: false
def absolute?(_), do: false
@doc """ @doc """
Resolves a relative IRI against a base IRI. Resolves a relative IRI against a base IRI.
@ -162,13 +161,12 @@ defmodule RDF.IRI do
@spec absolute(coercible, coercible) :: t | nil @spec absolute(coercible, coercible) :: t | nil
def absolute(iri, base) do def absolute(iri, base) do
cond do cond do
absolute?(iri) -> new(iri) absolute?(iri) -> new(iri)
not absolute?(base) -> nil not absolute?(base) -> nil
true -> merge(base, iri) true -> merge(base, iri)
end end
end end
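A brief sketch of `absolute/2` as used for base resolution (illustrative values):

    RDF.IRI.absolute("foo", "http://example.com/")      # relative IRI merged against the base
    RDF.IRI.absolute("http://example.com/foo", "base")  # already absolute, wrapped as is
    RDF.IRI.absolute("foo", "also relative")            # nil, since the base itself is not absolute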
@doc """ @doc """
Merges two IRIs. Merges two IRIs.
@ -183,7 +181,6 @@ defmodule RDF.IRI do
|> new() |> new()
end end
@doc """ @doc """
Returns the scheme of the given IRI Returns the scheme of the given IRI
@ -196,28 +193,27 @@ defmodule RDF.IRI do
iex> RDF.IRI.scheme("not an iri") iex> RDF.IRI.scheme("not an iri")
nil nil
""" """
@spec scheme(coercible) :: String.t | nil @spec scheme(coercible) :: String.t() | nil
def scheme(iri) def scheme(iri)
def scheme(%__MODULE__{value: value}), do: scheme(value) def scheme(%__MODULE__{value: value}), do: scheme(value)
def scheme(%URI{scheme: scheme}), do: scheme def scheme(%URI{scheme: scheme}), do: scheme
def scheme(qname) when maybe_ns_term(qname), do: Namespace.resolve_term!(qname) |> scheme() def scheme(qname) when maybe_ns_term(qname), do: Namespace.resolve_term!(qname) |> scheme()
def scheme(iri) when is_binary(iri) do def scheme(iri) when is_binary(iri) do
with [_, scheme] <- Regex.run(@scheme_regex, iri) do with [_, scheme] <- Regex.run(@scheme_regex, iri) do
scheme scheme
end end
end end
@doc """ @doc """
Parses an IRI into its components and returns them as an `URI` struct. Parses an IRI into its components and returns them as an `URI` struct.
""" """
@spec parse(coercible) :: URI.t @spec parse(coercible) :: URI.t()
def parse(iri) def parse(iri)
def parse(iri) when is_binary(iri), do: URI.parse(iri) def parse(iri) when is_binary(iri), do: URI.parse(iri)
def parse(qname) when maybe_ns_term(qname), do: Namespace.resolve_term!(qname) |> parse() def parse(qname) when maybe_ns_term(qname), do: Namespace.resolve_term!(qname) |> parse()
def parse(%__MODULE__{value: value}), do: URI.parse(value) def parse(%__MODULE__{value: value}), do: URI.parse(value)
def parse(%URI{} = uri), do: uri def parse(%URI{} = uri), do: uri
@doc """ @doc """
Tests for value equality of IRIs. Tests for value equality of IRIs.
@ -226,7 +222,8 @@ defmodule RDF.IRI do
see <https://www.w3.org/TR/rdf-concepts/#section-Graph-URIref> see <https://www.w3.org/TR/rdf-concepts/#section-Graph-URIref>
""" """
@spec equal_value?(t | RDF.Literal.t | atom, t | RDF.Literal.t | URI.t | atom) :: boolean | nil @spec equal_value?(t | RDF.Literal.t() | atom, t | RDF.Literal.t() | URI.t() | atom) ::
boolean | nil
def equal_value?(left, right) def equal_value?(left, right)
def equal_value?(%__MODULE__{value: left}, %__MODULE__{value: right}), def equal_value?(%__MODULE__{value: left}, %__MODULE__{value: right}),
@ -251,7 +248,6 @@ defmodule RDF.IRI do
def equal_value?(_, _), def equal_value?(_, _),
do: nil do: nil
@doc """ @doc """
Returns the given IRI as a string. Returns the given IRI as a string.
@ -267,7 +263,7 @@ defmodule RDF.IRI do
"http://example.com/#Foo" "http://example.com/#Foo"
""" """
@spec to_string(t | module) :: String.t @spec to_string(t | module) :: String.t()
def to_string(iri) def to_string(iri)
def to_string(%__MODULE__{value: value}), def to_string(%__MODULE__{value: value}),
@ -12,16 +12,15 @@ defmodule RDF.List do
import RDF.Guards import RDF.Guards
@type t :: %__MODULE__{ @type t :: %__MODULE__{
head: IRI.t, head: IRI.t(),
graph: Graph.t graph: Graph.t()
} }
@enforce_keys [:head] @enforce_keys [:head]
defstruct [:head, :graph] defstruct [:head, :graph]
@rdf_nil RDF.Utils.Bootstrapping.rdf_iri("nil") @rdf_nil RDF.Utils.Bootstrapping.rdf_iri("nil")
@doc """ @doc """
Creates a `RDF.List` for a given RDF list node of a given `RDF.Graph`. Creates a `RDF.List` for a given RDF list node of a given `RDF.Graph`.
@ -33,7 +32,7 @@ defmodule RDF.List do
- does not contain cycles, i.e. `rdf:rest` statements don't refer to - does not contain cycles, i.e. `rdf:rest` statements don't refer to
preceding list nodes preceding list nodes
""" """
@spec new(IRI.coercible, Graph.t) :: t @spec new(IRI.coercible(), Graph.t()) :: t
def new(head, graph) def new(head, graph)
def new(head, graph) when maybe_ns_term(head), def new(head, graph) when maybe_ns_term(head),
@ -48,7 +47,7 @@ defmodule RDF.List do
end end
defp well_formed?(list) do defp well_formed?(list) do
Enum.reduce_while(list, MapSet.new, fn node_description, preceding_nodes -> Enum.reduce_while(list, MapSet.new(), fn node_description, preceding_nodes ->
with head = node_description.subject do with head = node_description.subject do
if MapSet.member?(preceding_nodes, head) do if MapSet.member?(preceding_nodes, head) do
{:halt, false} {:halt, false}
@ -59,7 +58,6 @@ defmodule RDF.List do
end) && true end) && true
end end
@doc """ @doc """
Creates a `RDF.List` from a native Elixir list or any other `Enumerable` with coercible RDF values. Creates a `RDF.List` from a native Elixir list or any other `Enumerable` with coercible RDF values.
@ -73,18 +71,17 @@ defmodule RDF.List do
the head node of the empty list is always `RDF.nil`. the head node of the empty list is always `RDF.nil`.
""" """
@spec from(Enumerable.t, keyword) :: t @spec from(Enumerable.t(), keyword) :: t
def from(list, opts \\ []) do def from(list, opts \\ []) do
with head = Keyword.get(opts, :head, RDF.bnode), with head = Keyword.get(opts, :head, RDF.bnode()),
graph = Keyword.get(opts, :graph, RDF.graph), graph = Keyword.get(opts, :graph, RDF.graph()),
{head, graph} = do_from(list, head, graph, opts) {head, graph} = do_from(list, head, graph, opts) do
do
%__MODULE__{head: head, graph: graph} %__MODULE__{head: head, graph: graph}
end end
end end
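A minimal sketch of `from/2` (illustrative; values are coerced to RDF terms and nested Elixir lists become nested RDF lists):

    list = RDF.List.from([EX.O1, [1, 2], "foo"])
    RDF.List.values(list)   # the coerced values, with the nested list converted recursively
    RDF.List.nodes(list)    # the blank nodes forming the rdf:first/rdf:rest chain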
defp do_from([], _, graph, _) do defp do_from([], _, graph, _) do
{RDF.nil, graph} {RDF.nil(), graph}
end end
defp do_from(list, head, graph, opts) when maybe_ns_term(head) do defp do_from(list, head, graph, opts) when maybe_ns_term(head) do
@ -92,16 +89,17 @@ defmodule RDF.List do
end end
defp do_from([list | rest], head, graph, opts) when is_list(list) do defp do_from([list | rest], head, graph, opts) when is_list(list) do
with {nested_list_node, graph} = do_from(list, RDF.bnode, graph, opts) do with {nested_list_node, graph} = do_from(list, RDF.bnode(), graph, opts) do
do_from([nested_list_node | rest], head, graph, opts) do_from([nested_list_node | rest], head, graph, opts)
end end
end end
defp do_from([first | rest], head, graph, opts) do defp do_from([first | rest], head, graph, opts) do
with {next, graph} = do_from(rest, RDF.bnode, graph, opts) do with {next, graph} = do_from(rest, RDF.bnode(), graph, opts) do
{ {
head, head,
Graph.add(graph, Graph.add(
graph,
head head
|> RDF.first(first) |> RDF.first(first)
|> RDF.rest(next) |> RDF.rest(next)
@ -116,16 +114,16 @@ defmodule RDF.List do
|> do_from(head, graph, opts) |> do_from(head, graph, opts)
end end
@doc """ @doc """
The values of a `RDF.List` as an Elixir list. The values of a `RDF.List` as an Elixir list.
Nested lists are converted recursively. Nested lists are converted recursively.
""" """
@spec values(t) :: Enumerable.t @spec values(t) :: Enumerable.t()
def values(%__MODULE__{graph: graph} = list) do def values(%__MODULE__{graph: graph} = list) do
Enum.map list, fn node_description -> Enum.map(list, fn node_description ->
value = Description.first(node_description, RDF.first) value = Description.first(node_description, RDF.first())
if node?(value, graph) do if node?(value, graph) do
value value
|> new(graph) |> new(graph)
@ -133,26 +131,23 @@ defmodule RDF.List do
else else
value value
end end
end end)
end end
@doc """ @doc """
The RDF nodes constituting a `RDF.List` as an Elixir list. The RDF nodes constituting a `RDF.List` as an Elixir list.
""" """
@spec nodes(t) :: [BlankNode.t] @spec nodes(t) :: [BlankNode.t()]
def nodes(%__MODULE__{} = list) do def nodes(%__MODULE__{} = list) do
Enum.map list, fn node_description -> node_description.subject end Enum.map(list, fn node_description -> node_description.subject end)
end end
@doc """ @doc """
Checks if a list is the empty list. Checks if a list is the empty list.
""" """
@spec empty?(t) :: boolean @spec empty?(t) :: boolean
def empty?(%__MODULE__{head: @rdf_nil}), do: true def empty?(%__MODULE__{head: @rdf_nil}), do: true
def empty?(%__MODULE__{}), do: false def empty?(%__MODULE__{}), do: false
@doc """ @doc """
Checks if the given list consists of list nodes which are all blank nodes. Checks if the given list consists of list nodes which are all blank nodes.
@ -161,12 +156,11 @@ defmodule RDF.List do
def valid?(%__MODULE__{head: @rdf_nil}), do: true def valid?(%__MODULE__{head: @rdf_nil}), do: true
def valid?(%__MODULE__{} = list) do def valid?(%__MODULE__{} = list) do
Enum.all? list, fn node_description -> Enum.all?(list, fn node_description ->
RDF.bnode?(node_description.subject) RDF.bnode?(node_description.subject)
end end)
end end
@doc """ @doc """
Checks if a given resource is a RDF list node in a given `RDF.Graph`. Checks if a given resource is a RDF list node in a given `RDF.Graph`.
@ -176,7 +170,7 @@ defmodule RDF.List do
Note: This function doesn't indicate if the list is valid. Note: This function doesn't indicate if the list is valid.
See `new/2` and `valid?/2` for validations. See `new/2` and `valid?/2` for validations.
""" """
@spec node?(any, Graph.t) :: boolean @spec node?(any, Graph.t()) :: boolean
def node?(list_node, graph) def node?(list_node, graph)
def node?(@rdf_nil, _), def node?(@rdf_nil, _),
@ -204,15 +198,14 @@ defmodule RDF.List do
def node?(nil), do: false def node?(nil), do: false
def node?(%Description{predications: predications}) do def node?(%Description{predications: predications}) do
Map.has_key?(predications, RDF.first) and Map.has_key?(predications, RDF.first()) and
Map.has_key?(predications, RDF.rest) Map.has_key?(predications, RDF.rest())
end end
defimpl Enumerable do defimpl Enumerable do
@rdf_nil RDF.Utils.Bootstrapping.rdf_iri("nil") @rdf_nil RDF.Utils.Bootstrapping.rdf_iri("nil")
def reduce(_, {:halt, acc}, _fun), do: {:halted, acc} def reduce(_, {:halt, acc}, _fun), do: {:halted, acc}
def reduce(list, {:suspend, acc}, fun), do: {:suspended, acc, &reduce(list, &1, fun)} def reduce(list, {:suspend, acc}, fun), do: {:suspended, acc, &reduce(list, &1, fun)}
def reduce(%RDF.List{head: @rdf_nil}, {:cont, acc}, _fun), def reduce(%RDF.List{head: @rdf_nil}, {:cont, acc}, _fun),
@ -226,19 +219,17 @@ defmodule RDF.List do
def reduce(_, _, _), do: {:halted, nil} def reduce(_, _, _), do: {:halted, nil}
defp do_reduce(%RDF.List{head: head, graph: graph}, defp do_reduce(%RDF.List{head: head, graph: graph}, {:cont, acc}, fun) do
{:cont, acc}, fun) do
with description when not is_nil(description) <- with description when not is_nil(description) <-
Graph.description(graph, head), Graph.description(graph, head),
[_] <- Description.get(description, RDF.first), [_] <- Description.get(description, RDF.first()),
[rest] <- Description.get(description, RDF.rest), [rest] <- Description.get(description, RDF.rest()),
acc = fun.(description, acc) acc = fun.(description, acc) do
do
if rest == @rdf_nil do if rest == @rdf_nil do
case acc do case acc do
{:cont, acc} -> {:done, acc} {:cont, acc} -> {:done, acc}
# TODO: Is the :suspend case handled properly # TODO: Is the :suspend case handled properly
_ -> reduce(%RDF.List{head: rest, graph: graph}, acc, fun) _ -> reduce(%RDF.List{head: rest, graph: graph}, acc, fun)
end end
else else
reduce(%RDF.List{head: rest, graph: graph}, acc, fun) reduce(%RDF.List{head: rest, graph: graph}, acc, fun)
@ -246,13 +237,14 @@ defmodule RDF.List do
else else
nil -> nil ->
{:halted, nil} {:halted, nil}
values when is_list(values) -> values when is_list(values) ->
{:halted, nil} {:halted, nil}
end end
end end
def member?(_, _), do: {:error, __MODULE__} def member?(_, _), do: {:error, __MODULE__}
def count(_), do: {:error, __MODULE__} def count(_), do: {:error, __MODULE__}
def slice(_), do: {:error, __MODULE__} def slice(_), do: {:error, __MODULE__}
end end
end end
@ -35,8 +35,10 @@ defmodule RDF.Literal do
def new(value) do def new(value) do
case coerce(value) do case coerce(value) do
nil -> nil ->
raise RDF.Literal.InvalidError, "#{inspect value} not convertible to a RDF.Literal" raise RDF.Literal.InvalidError, "#{inspect(value)} not convertible to a RDF.Literal"
literal -> literal
literal ->
literal
end end
end end
@ -58,7 +60,7 @@ defmodule RDF.Literal do
datatype = Keyword.get(opts, :datatype) -> datatype = Keyword.get(opts, :datatype) ->
case Datatype.get(datatype) do case Datatype.get(datatype) do
nil -> Generic.new(value, opts) nil -> Generic.new(value, opts)
datatype -> datatype.new(value, opts) datatype -> datatype.new(value, opts)
end end
@ -98,16 +100,16 @@ defmodule RDF.Literal do
def coerce(%__MODULE__{} = literal), do: literal def coerce(%__MODULE__{} = literal), do: literal
def coerce(value) when is_binary(value), do: RDF.XSD.String.new(value) def coerce(value) when is_binary(value), do: RDF.XSD.String.new(value)
def coerce(value) when is_boolean(value), do: RDF.XSD.Boolean.new(value) def coerce(value) when is_boolean(value), do: RDF.XSD.Boolean.new(value)
def coerce(value) when is_integer(value), do: RDF.XSD.Integer.new(value) def coerce(value) when is_integer(value), do: RDF.XSD.Integer.new(value)
def coerce(value) when is_float(value), do: RDF.XSD.Double.new(value) def coerce(value) when is_float(value), do: RDF.XSD.Double.new(value)
def coerce(%Decimal{} = value), do: RDF.XSD.Decimal.new(value) def coerce(%Decimal{} = value), do: RDF.XSD.Decimal.new(value)
def coerce(%Date{} = value), do: RDF.XSD.Date.new(value) def coerce(%Date{} = value), do: RDF.XSD.Date.new(value)
def coerce(%Time{} = value), do: RDF.XSD.Time.new(value) def coerce(%Time{} = value), do: RDF.XSD.Time.new(value)
def coerce(%DateTime{} = value), do: RDF.XSD.DateTime.new(value) def coerce(%DateTime{} = value), do: RDF.XSD.DateTime.new(value)
def coerce(%NaiveDateTime{} = value), do: RDF.XSD.DateTime.new(value) def coerce(%NaiveDateTime{} = value), do: RDF.XSD.DateTime.new(value)
def coerce(%URI{} = value), do: RDF.XSD.AnyURI.new(value) def coerce(%URI{} = value), do: RDF.XSD.AnyURI.new(value)
def coerce(value) when maybe_ns_term(value) do def coerce(value) when maybe_ns_term(value) do
case RDF.Namespace.resolve_term(value) do case RDF.Namespace.resolve_term(value) do
@ -132,7 +134,6 @@ defmodule RDF.Literal do
def coerce(_), do: nil def coerce(_), do: nil
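For context, a quick sketch of the coercion clauses above (illustrative):

    RDF.Literal.coerce(42)              # an RDF.XSD.Integer literal
    RDF.Literal.coerce(3.14)            # an RDF.XSD.Double literal
    RDF.Literal.coerce(~D[2020-06-29])  # an RDF.XSD.Date literal
    RDF.Literal.coerce(self())          # nil (not coercible), which is what makes new/1 raise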
@doc """ @doc """
Creates a new `RDF.Literal`, but fails if it's not valid. Creates a new `RDF.Literal`, but fails if it's not valid.
@ -154,10 +155,11 @@ defmodule RDF.Literal do
@spec new!(t | any, keyword) :: t @spec new!(t | any, keyword) :: t
def new!(value, opts \\ []) do def new!(value, opts \\ []) do
literal = new(value, opts) literal = new(value, opts)
if valid?(literal) do if valid?(literal) do
literal literal
else else
raise RDF.Literal.InvalidError, "invalid RDF.Literal: #{inspect literal}" raise RDF.Literal.InvalidError, "invalid RDF.Literal: #{inspect(literal)}"
end end
end end
@ -223,8 +225,7 @@ defmodule RDF.Literal do
""" """
@spec simple?(t) :: boolean @spec simple?(t) :: boolean
def simple?(%__MODULE__{literal: %RDF.XSD.String{}}), do: true def simple?(%__MODULE__{literal: %RDF.XSD.String{}}), do: true
def simple?(%__MODULE__{}), do: false def simple?(%__MODULE__{}), do: false
@doc """ @doc """
Returns if a literal is a plain literal. Returns if a literal is a plain literal.
@ -263,7 +264,7 @@ defmodule RDF.Literal do
@doc """ @doc """
Returns the lexical form of the given `literal`. Returns the lexical form of the given `literal`.
""" """
@spec lexical(t) :: String.t @spec lexical(t) :: String.t()
def lexical(%__MODULE__{literal: %datatype{} = literal}), do: datatype.lexical(literal) def lexical(%__MODULE__{literal: %datatype{} = literal}), do: datatype.lexical(literal)
@doc """ @doc """
@ -275,8 +276,9 @@ defmodule RDF.Literal do
@doc """ @doc """
Returns the canonical lexical of the given `literal`. Returns the canonical lexical of the given `literal`.
""" """
@spec canonical_lexical(t) :: String.t | nil @spec canonical_lexical(t) :: String.t() | nil
def canonical_lexical(%__MODULE__{literal: %datatype{} = literal}), do: datatype.canonical_lexical(literal) def canonical_lexical(%__MODULE__{literal: %datatype{} = literal}),
do: datatype.canonical_lexical(literal)
@doc """ @doc """
Returns if the lexical of the given `literal` has the canonical form. Returns if the lexical of the given `literal` has the canonical form.
@ -311,7 +313,7 @@ defmodule RDF.Literal do
def equal_value?(_, _), do: nil def equal_value?(_, _), do: nil
@spec compare(t, t) :: Datatype.comparison_result | :indeterminate | nil @spec compare(t, t) :: Datatype.comparison_result() | :indeterminate | nil
def compare(%__MODULE__{literal: %datatype{} = left}, right) do def compare(%__MODULE__{literal: %datatype{} = left}, right) do
datatype.compare(left, right) datatype.compare(left, right)
end end
@ -339,16 +341,21 @@ defmodule RDF.Literal do
see <https://www.w3.org/TR/xpath-functions/#func-matches> see <https://www.w3.org/TR/xpath-functions/#func-matches>
""" """
@spec matches?(t | String.t, pattern :: t | String.t, flags :: t | String.t) :: boolean @spec matches?(t | String.t(), pattern :: t | String.t(), flags :: t | String.t()) :: boolean
def matches?(value, pattern, flags \\ "") def matches?(value, pattern, flags \\ "")
def matches?(%__MODULE__{} = literal, pattern, flags), def matches?(%__MODULE__{} = literal, pattern, flags),
do: matches?(lexical(literal), pattern, flags) do: matches?(lexical(literal), pattern, flags)
def matches?(value, %__MODULE__{literal: %RDF.XSD.String{}} = pattern, flags), def matches?(value, %__MODULE__{literal: %RDF.XSD.String{}} = pattern, flags),
do: matches?(value, lexical(pattern), flags) do: matches?(value, lexical(pattern), flags)
def matches?(value, pattern, %__MODULE__{literal: %RDF.XSD.String{}} = flags), def matches?(value, pattern, %__MODULE__{literal: %RDF.XSD.String{}} = flags),
do: matches?(value, pattern, lexical(flags)) do: matches?(value, pattern, lexical(flags))
def matches?(value, pattern, flags) when is_binary(value) and is_binary(pattern) and is_binary(flags),
do: RDF.XSD.Utils.Regex.matches?(value, pattern, flags) def matches?(value, pattern, flags)
when is_binary(value) and is_binary(pattern) and is_binary(flags),
do: RDF.XSD.Utils.Regex.matches?(value, pattern, flags)
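A short sketch of `matches?/3` (illustrative; the flags follow the XPath regex flags, e.g. "i" for case-insensitive matching):

    RDF.Literal.matches?("foobar", "^foo")
    RDF.Literal.matches?(RDF.literal("foobar"), "BAR", "i")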
@doc """ @doc """
Updates the value of a `RDF.Literal` without changing everything else. Updates the value of a `RDF.Literal` without changing everything else.
@ -53,7 +53,7 @@ defmodule RDF.Literal.Datatype do
A final catch-all clause should delegate to `super`. For example `RDF.XSD.Datatype`s will handle casting from derived A final catch-all clause should delegate to `super`. For example `RDF.XSD.Datatype`s will handle casting from derived
datatypes in the default implementation. datatypes in the default implementation.
""" """
@callback do_cast(literal | RDF.IRI.t | RDF.BlankNode.t) :: Literal.t() | nil @callback do_cast(literal | RDF.IRI.t() | RDF.BlankNode.t()) :: Literal.t() | nil
@doc """ @doc """
Checks if the given `RDF.Literal` has the datatype for which the `RDF.Literal.Datatype` is implemented or is derived from it. Checks if the given `RDF.Literal` has the datatype for which the `RDF.Literal.Datatype` is implemented or is derived from it.
@ -64,24 +64,24 @@ defmodule RDF.Literal.Datatype do
true true
""" """
@callback datatype?(Literal.t | t | literal) :: boolean @callback datatype?(Literal.t() | t | literal) :: boolean
@doc """ @doc """
The datatype IRI of the given `RDF.Literal`. The datatype IRI of the given `RDF.Literal`.
""" """
@callback datatype_id(Literal.t | literal) :: IRI.t() @callback datatype_id(Literal.t() | literal) :: IRI.t()
@doc """ @doc """
The language of the given `RDF.Literal` if present. The language of the given `RDF.Literal` if present.
""" """
@callback language(Literal.t | literal) :: String.t() | nil @callback language(Literal.t() | literal) :: String.t() | nil
@doc """ @doc """
Returns the value of a `RDF.Literal`. Returns the value of a `RDF.Literal`.
This function also accepts literals of derived datatypes. This function also accepts literals of derived datatypes.
""" """
@callback value(Literal.t | literal) :: any @callback value(Literal.t() | literal) :: any
@doc """ @doc """
Returns the lexical form of a `RDF.Literal`. Returns the lexical form of a `RDF.Literal`.
@ -178,7 +178,7 @@ defmodule RDF.Literal.Datatype do
iex> RDF.literal("foo", datatype: "http://example.com/dt") |> RDF.Literal.Generic.update(fn _ -> "bar" end) iex> RDF.literal("foo", datatype: "http://example.com/dt") |> RDF.Literal.Generic.update(fn _ -> "bar" end)
RDF.literal("bar", datatype: "http://example.com/dt") RDF.literal("bar", datatype: "http://example.com/dt")
""" """
@callback update(Literal.t() | literal, fun()) :: Literal.t @callback update(Literal.t() | literal, fun()) :: Literal.t()
@doc """ @doc """
Updates the value of a `RDF.Literal` without changing anything else. Updates the value of a `RDF.Literal` without changing anything else.
@ -192,7 +192,7 @@ defmodule RDF.Literal.Datatype do
...> fn value -> value <> "1" end, as: :lexical) ...> fn value -> value <> "1" end, as: :lexical)
RDF.XSD.integer(421) RDF.XSD.integer(421)
""" """
@callback update(Literal.t() | literal, fun(), keyword) :: Literal.t @callback update(Literal.t() | literal, fun(), keyword) :: Literal.t()
@doc """ @doc """
Returns the `RDF.Literal.Datatype` for a datatype IRI. Returns the `RDF.Literal.Datatype` for a datatype IRI.
@ -200,14 +200,13 @@ defmodule RDF.Literal.Datatype do
defdelegate get(id), to: Literal.Datatype.Registry, as: :datatype defdelegate get(id), to: Literal.Datatype.Registry, as: :datatype
@doc !""" @doc !"""
As opposed to RDF.Literal.valid?/1 this function operates on the datatype structs ... As opposed to RDF.Literal.valid?/1 this function operates on the datatype structs ...
It's meant for internal use only and doesn't perform checks if the struct It's meant for internal use only and doesn't perform checks if the struct
passed is actually a `RDF.Literal.Datatype` struct. passed is actually a `RDF.Literal.Datatype` struct.
""" """
def valid?(%datatype{} = datatype_literal), do: datatype.valid?(datatype_literal) def valid?(%datatype{} = datatype_literal), do: datatype.valid?(datatype_literal)
defmacro __using__(opts) do defmacro __using__(opts) do
name = Keyword.fetch!(opts, :name) name = Keyword.fetch!(opts, :name)
id = Keyword.fetch!(opts, :id) id = Keyword.fetch!(opts, :id)
@ -227,10 +226,10 @@ defmodule RDF.Literal.Datatype do
@behaviour unquote(__MODULE__) @behaviour unquote(__MODULE__)
@doc !""" @doc !"""
This function is just used to check if a module is a RDF.Literal.Datatype. This function is just used to check if a module is a RDF.Literal.Datatype.
See `RDF.Literal.Datatype.Registry.is_rdf_literal_datatype?/1`. See `RDF.Literal.Datatype.Registry.is_rdf_literal_datatype?/1`.
""" """
def __rdf_literal_datatype_indicator__, do: true def __rdf_literal_datatype_indicator__, do: true
@name unquote(name) @name unquote(name)
@ -274,6 +273,7 @@ defmodule RDF.Literal.Datatype do
literal |> canonical() |> lexical() literal |> canonical() |> lexical()
end end
end end
def canonical_lexical(_), do: nil def canonical_lexical(_), do: nil
@doc """ @doc """
@ -284,15 +284,17 @@ defmodule RDF.Literal.Datatype do
Implementations define the casting for a given value with the `c:RDF.Literal.Datatype.do_cast/1` callback. Implementations define the casting for a given value with the `c:RDF.Literal.Datatype.do_cast/1` callback.
""" """
@spec cast(Literal.Datatype.literal | RDF.Term.t) :: Literal.t() | nil @spec cast(Literal.Datatype.literal() | RDF.Term.t()) :: Literal.t() | nil
@dialyzer {:nowarn_function, cast: 1} @dialyzer {:nowarn_function, cast: 1}
def cast(literal_or_value) def cast(literal_or_value)
def cast(%Literal{literal: literal}), do: cast(literal) def cast(%Literal{literal: literal}), do: cast(literal)
def cast(%__MODULE__{} = datatype_literal), def cast(%__MODULE__{} = datatype_literal),
do: if(valid?(datatype_literal), do: literal(datatype_literal)) do: if(valid?(datatype_literal), do: literal(datatype_literal))
def cast(%struct{} = datatype_literal) do def cast(%struct{} = datatype_literal) do
if (Literal.datatype?(struct) and Literal.Datatype.valid?(datatype_literal)) or if (Literal.datatype?(struct) and Literal.Datatype.valid?(datatype_literal)) or
struct in [RDF.IRI, RDF.BlankNode] do struct in [RDF.IRI, RDF.BlankNode] do
case do_cast(datatype_literal) do case do_cast(datatype_literal) do
%__MODULE__{} = literal -> if valid?(literal), do: literal(literal) %__MODULE__{} = literal -> if valid?(literal), do: literal(literal)
%Literal{literal: %__MODULE__{}} = literal -> if valid?(literal), do: literal %Literal{literal: %__MODULE__{}} = literal -> if valid?(literal), do: literal
@ -325,10 +327,15 @@ defmodule RDF.Literal.Datatype do
def equal_value?(%Literal{literal: left}, right), do: equal_value?(left, right) def equal_value?(%Literal{literal: left}, right), do: equal_value?(left, right)
def equal_value?(nil, _), do: nil def equal_value?(nil, _), do: nil
def equal_value?(_, nil), do: nil def equal_value?(_, nil), do: nil
def equal_value?(left, right) do def equal_value?(left, right) do
cond do cond do
not Literal.datatype?(right) and not resource?(right) -> equal_value?(left, Literal.coerce(right)) not Literal.datatype?(right) and not resource?(right) ->
not Literal.datatype?(left) and not resource?(left) -> equal_value?(Literal.coerce(left), right) equal_value?(left, Literal.coerce(right))
not Literal.datatype?(left) and not resource?(left) ->
equal_value?(Literal.coerce(left), right)
true -> true ->
left_datatype = left.__struct__ left_datatype = left.__struct__
right_datatype = right.__struct__ right_datatype = right.__struct__
@ -343,6 +350,7 @@ defmodule RDF.Literal.Datatype do
case equality_path(left_datatype, right_datatype) do case equality_path(left_datatype, right_datatype) do
{:same_or_derived, datatype} -> {:same_or_derived, datatype} ->
datatype.do_equal_value_same_or_derived_datatypes?(left, right) datatype.do_equal_value_same_or_derived_datatypes?(left, right)
{:different, datatype} -> {:different, datatype} ->
datatype.do_equal_value_different_datatypes?(left, right) datatype.do_equal_value_different_datatypes?(left, right)
end end
@ -381,14 +389,15 @@ defmodule RDF.Literal.Datatype do
# RDF.XSD.Datatypes offers another default implementation, but since it is # RDF.XSD.Datatypes offers another default implementation, but since it is
# still in a macro implementation defoverridable doesn't work # still in a macro implementation defoverridable doesn't work
unless RDF.XSD.Datatype in @behaviour do unless RDF.XSD.Datatype in @behaviour do
@spec compare(RDF.Literal.t() | any, RDF.Literal.t() | any) :: RDF.Literal.Datatype.comparison_result | :indeterminate | nil @spec compare(RDF.Literal.t() | any, RDF.Literal.t() | any) ::
RDF.Literal.Datatype.comparison_result() | :indeterminate | nil
def compare(left, right) def compare(left, right)
def compare(left, %RDF.Literal{literal: right}), do: compare(left, right) def compare(left, %RDF.Literal{literal: right}), do: compare(left, right)
def compare(%RDF.Literal{literal: left}, right), do: compare(left, right) def compare(%RDF.Literal{literal: left}, right), do: compare(left, right)
def compare(left, right) do def compare(left, right) do
if RDF.Literal.datatype?(left) and RDF.Literal.datatype?(right) and if RDF.Literal.datatype?(left) and RDF.Literal.datatype?(right) and
RDF.Literal.Datatype.valid?(left) and RDF.Literal.Datatype.valid?(right) do RDF.Literal.Datatype.valid?(left) and RDF.Literal.Datatype.valid?(right) do
do_compare(left, right) do_compare(left, right)
end end
end end
@ -396,8 +405,12 @@ defmodule RDF.Literal.Datatype do
@impl RDF.Literal.Datatype @impl RDF.Literal.Datatype
def do_compare(%datatype{} = left, %datatype{} = right) do def do_compare(%datatype{} = left, %datatype{} = right) do
case {datatype.value(left), datatype.value(right)} do case {datatype.value(left), datatype.value(right)} do
{left_value, right_value} when left_value < right_value -> :lt {left_value, right_value} when left_value < right_value ->
{left_value, right_value} when left_value > right_value -> :gt :lt
{left_value, right_value} when left_value > right_value ->
:gt
_ -> _ ->
if datatype.equal_value?(left, right), do: :eq if datatype.equal_value?(left, right), do: :eq
end end
@ -415,6 +428,7 @@ defmodule RDF.Literal.Datatype do
@impl unquote(__MODULE__) @impl unquote(__MODULE__)
def update(literal, fun, opts \\ []) def update(literal, fun, opts \\ [])
def update(%Literal{literal: literal}, fun, opts), do: update(literal, fun, opts) def update(%Literal{literal: literal}, fun, opts), do: update(literal, fun, opts)
def update(%__MODULE__{} = literal, fun, opts) do def update(%__MODULE__{} = literal, fun, opts) do
case Keyword.get(opts, :as) do case Keyword.get(opts, :as) do
:lexical -> lexical(literal) :lexical -> lexical(literal)
@ -449,7 +463,7 @@ defmodule RDF.Literal.Datatype do
import ProtocolEx import ProtocolEx
defimpl_ex Registration, unquote(unquoted_id), defimpl_ex Registration, unquote(unquoted_id),
for: RDF.Literal.Datatype.Registry.Registration do for: RDF.Literal.Datatype.Registry.Registration do
@moduledoc false @moduledoc false
def datatype(id), do: unquote(datatype) def datatype(id), do: unquote(datatype)
@ -13,37 +13,38 @@ defmodule RDF.Literal.Datatype.Registry do
RDF.XSD.Double RDF.XSD.Double
] ]
@builtin_numeric_datatypes @primitive_numeric_datatypes ++ [ @builtin_numeric_datatypes @primitive_numeric_datatypes ++
RDF.XSD.Long, [
RDF.XSD.Int, RDF.XSD.Long,
RDF.XSD.Short, RDF.XSD.Int,
RDF.XSD.Byte, RDF.XSD.Short,
RDF.XSD.NonNegativeInteger, RDF.XSD.Byte,
RDF.XSD.PositiveInteger, RDF.XSD.NonNegativeInteger,
RDF.XSD.UnsignedLong, RDF.XSD.PositiveInteger,
RDF.XSD.UnsignedInt, RDF.XSD.UnsignedLong,
RDF.XSD.UnsignedShort, RDF.XSD.UnsignedInt,
RDF.XSD.UnsignedByte, RDF.XSD.UnsignedShort,
RDF.XSD.NonPositiveInteger, RDF.XSD.UnsignedByte,
RDF.XSD.NegativeInteger, RDF.XSD.NonPositiveInteger,
RDF.XSD.Float RDF.XSD.NegativeInteger,
] RDF.XSD.Float
]
@builtin_xsd_datatypes [ @builtin_xsd_datatypes [
XSD.Boolean, XSD.Boolean,
XSD.String, XSD.String,
XSD.Date, XSD.Date,
XSD.Time, XSD.Time,
XSD.DateTime, XSD.DateTime,
XSD.AnyURI XSD.AnyURI
] ++ @builtin_numeric_datatypes ] ++ @builtin_numeric_datatypes
@builtin_datatypes [RDF.LangString | @builtin_xsd_datatypes] @builtin_datatypes [RDF.LangString | @builtin_xsd_datatypes]
@doc """ @doc """
Returns a list of all builtin `RDF.Literal.Datatype` modules. Returns a list of all builtin `RDF.Literal.Datatype` modules.
""" """
@spec builtin_datatypes :: [RDF.Literal.Datatype.t] @spec builtin_datatypes :: [RDF.Literal.Datatype.t()]
def builtin_datatypes, do: @builtin_datatypes def builtin_datatypes, do: @builtin_datatypes
@doc """ @doc """
@ -63,7 +64,7 @@ defmodule RDF.Literal.Datatype.Registry do
@doc """ @doc """
Checks if the given module is a builtin datatype or a registered custom datatype implementing the `RDF.Literal.Datatype` behaviour. Checks if the given module is a builtin datatype or a registered custom datatype implementing the `RDF.Literal.Datatype` behaviour.
""" """
@spec datatype?(Literal.t | Literal.Datatype.literal | module) :: boolean @spec datatype?(Literal.t() | Literal.Datatype.literal() | module) :: boolean
def datatype?(value) def datatype?(value)
# We assume literals were created properly which means they have a proper RDF.Literal.Datatype # We assume literals were created properly which means they have a proper RDF.Literal.Datatype
@ -71,7 +72,7 @@ defmodule RDF.Literal.Datatype.Registry do
def datatype?(value), do: datatype_struct?(value) def datatype?(value), do: datatype_struct?(value)
@doc false @doc false
@spec datatype_struct?(Literal.Datatype.literal | module) :: boolean @spec datatype_struct?(Literal.Datatype.literal() | module) :: boolean
def datatype_struct?(value) def datatype_struct?(value)
def datatype_struct?(%datatype{}), do: datatype_struct?(datatype) def datatype_struct?(%datatype{}), do: datatype_struct?(datatype)
@ -87,7 +88,7 @@ defmodule RDF.Literal.Datatype.Registry do
@doc """ @doc """
Returns a list of all builtin `RDF.XSD.Datatype` modules. Returns a list of all builtin `RDF.XSD.Datatype` modules.
""" """
@spec builtin_xsd_datatypes :: [RDF.Literal.Datatype.t] @spec builtin_xsd_datatypes :: [RDF.Literal.Datatype.t()]
def builtin_xsd_datatypes, do: @builtin_xsd_datatypes def builtin_xsd_datatypes, do: @builtin_xsd_datatypes
@doc false @doc false
@ -103,13 +104,13 @@ defmodule RDF.Literal.Datatype.Registry do
@doc """ @doc """
Checks if the given module is a builtin XSD datatype or a registered custom datatype implementing the `RDF.XSD.Datatype` behaviour. Checks if the given module is a builtin XSD datatype or a registered custom datatype implementing the `RDF.XSD.Datatype` behaviour.
""" """
@spec xsd_datatype?(Literal.t | XSD.Datatype.literal | module) :: boolean @spec xsd_datatype?(Literal.t() | XSD.Datatype.literal() | module) :: boolean
def xsd_datatype?(value) def xsd_datatype?(value)
def xsd_datatype?(%Literal{literal: datatype_struct}), do: xsd_datatype?(datatype_struct) def xsd_datatype?(%Literal{literal: datatype_struct}), do: xsd_datatype?(datatype_struct)
def xsd_datatype?(value), do: xsd_datatype_struct?(value) def xsd_datatype?(value), do: xsd_datatype_struct?(value)
@doc false @doc false
@spec xsd_datatype_struct?(RDF.Literal.t() | XSD.Datatype.literal | module) :: boolean @spec xsd_datatype_struct?(RDF.Literal.t() | XSD.Datatype.literal() | module) :: boolean
def xsd_datatype_struct?(value) def xsd_datatype_struct?(value)
def xsd_datatype_struct?(%datatype{}), do: xsd_datatype_struct?(datatype) def xsd_datatype_struct?(%datatype{}), do: xsd_datatype_struct?(datatype)
@ -123,13 +124,13 @@ defmodule RDF.Literal.Datatype.Registry do
@doc """ @doc """
Returns a list of all numeric datatype modules. Returns a list of all numeric datatype modules.
""" """
@spec builtin_numeric_datatypes() :: [RDF.Literal.Datatype.t] @spec builtin_numeric_datatypes() :: [RDF.Literal.Datatype.t()]
def builtin_numeric_datatypes(), do: @builtin_numeric_datatypes def builtin_numeric_datatypes(), do: @builtin_numeric_datatypes
@doc """ @doc """
The set of all primitive numeric datatypes. The set of all primitive numeric datatypes.
""" """
@spec primitive_numeric_datatypes() :: [RDF.Literal.Datatype.t] @spec primitive_numeric_datatypes() :: [RDF.Literal.Datatype.t()]
def primitive_numeric_datatypes(), do: @primitive_numeric_datatypes def primitive_numeric_datatypes(), do: @primitive_numeric_datatypes
@doc false @doc false
@ -142,7 +143,6 @@ defmodule RDF.Literal.Datatype.Registry do
def builtin_numeric_datatype?(_), do: false def builtin_numeric_datatype?(_), do: false
@doc """ @doc """
Returns if a given literal or datatype has or is a numeric datatype. Returns if a given literal or datatype has or is a numeric datatype.
""" """
@ -152,12 +152,11 @@ defmodule RDF.Literal.Datatype.Registry do
def numeric_datatype?(%datatype{}), do: numeric_datatype?(datatype) def numeric_datatype?(%datatype{}), do: numeric_datatype?(datatype)
def numeric_datatype?(datatype) when maybe_module(datatype) do def numeric_datatype?(datatype) when maybe_module(datatype) do
builtin_numeric_datatype?(datatype) or ( builtin_numeric_datatype?(datatype) or
xsd_datatype?(datatype) and (xsd_datatype?(datatype) and
Enum.any?(@primitive_numeric_datatypes, fn numeric_primitive -> Enum.any?(@primitive_numeric_datatypes, fn numeric_primitive ->
datatype.derived_from?(numeric_primitive) datatype.derived_from?(numeric_primitive)
end) end))
)
end end
def numeric_datatype?(_), do: false def numeric_datatype?(_), do: false
@ -165,7 +164,7 @@ defmodule RDF.Literal.Datatype.Registry do
@doc """ @doc """
Returns the `RDF.Literal.Datatype` for a datatype IRI. Returns the `RDF.Literal.Datatype` for a datatype IRI.
""" """
@spec datatype(Literal.t | IRI.t | String.t) :: Literal.Datatype.t @spec datatype(Literal.t() | IRI.t() | String.t()) :: Literal.Datatype.t()
def datatype(%Literal{} = literal), do: literal.literal.__struct__ def datatype(%Literal{} = literal), do: literal.literal.__struct__
def datatype(%IRI{} = id), do: id |> to_string() |> datatype() def datatype(%IRI{} = id), do: id |> to_string() |> datatype()
def datatype(id) when maybe_ns_term(id), do: id |> Namespace.resolve_term!() |> datatype() def datatype(id) when maybe_ns_term(id), do: id |> Namespace.resolve_term!() |> datatype()
@ -174,7 +173,7 @@ defmodule RDF.Literal.Datatype.Registry do
@doc """ @doc """
Returns the `RDF.XSD.Datatype` for a datatype IRI. Returns the `RDF.XSD.Datatype` for a datatype IRI.
""" """
@spec xsd_datatype(Literal.t | IRI.t | String.t) :: XSD.Datatype.t @spec xsd_datatype(Literal.t() | IRI.t() | String.t()) :: XSD.Datatype.t()
def xsd_datatype(id) do def xsd_datatype(id) do
datatype = datatype(id) datatype = datatype(id)
@ -14,16 +14,18 @@ defmodule RDF.Literal.Generic do
import RDF.Guards import RDF.Guards
@type t :: %__MODULE__{ @type t :: %__MODULE__{
value: String.t, value: String.t(),
datatype: String.t datatype: String.t()
} }
@impl Datatype @impl Datatype
@spec new(any, String.t | IRI.t | keyword) :: Literal.t @spec new(any, String.t() | IRI.t() | keyword) :: Literal.t()
def new(value, datatype_or_opts \\ []) def new(value, datatype_or_opts \\ [])
def new(value, %IRI{} = datatype), do: new(value, datatype: datatype) def new(value, %IRI{} = datatype), do: new(value, datatype: datatype)
def new(value, datatype) when is_binary(datatype) or maybe_ns_term(datatype), def new(value, datatype) when is_binary(datatype) or maybe_ns_term(datatype),
do: new(value, datatype: datatype) do: new(value, datatype: datatype)
def new(value, opts) do def new(value, opts) do
%Literal{ %Literal{
literal: %__MODULE__{ literal: %__MODULE__{
@ -36,18 +38,24 @@ defmodule RDF.Literal.Generic do
defp normalize_datatype(nil), do: nil defp normalize_datatype(nil), do: nil
defp normalize_datatype(""), do: nil defp normalize_datatype(""), do: nil
defp normalize_datatype(%IRI{} = datatype), do: to_string(datatype) defp normalize_datatype(%IRI{} = datatype), do: to_string(datatype)
defp normalize_datatype(datatype) when maybe_ns_term(datatype), do: datatype |> RDF.iri() |> to_string()
defp normalize_datatype(datatype) when maybe_ns_term(datatype),
do: datatype |> RDF.iri() |> to_string()
defp normalize_datatype(datatype), do: datatype defp normalize_datatype(datatype), do: datatype
@impl Datatype @impl Datatype
@spec new!(any, String.t | IRI.t | keyword) :: Literal.t @spec new!(any, String.t() | IRI.t() | keyword) :: Literal.t()
def new!(value, datatype_or_opts \\ []) do def new!(value, datatype_or_opts \\ []) do
literal = new(value, datatype_or_opts) literal = new(value, datatype_or_opts)
if valid?(literal) do if valid?(literal) do
literal literal
else else
raise ArgumentError, "#{inspect(value)} with datatype #{inspect literal.literal.datatype} is not a valid #{inspect(__MODULE__)}" raise ArgumentError,
"#{inspect(value)} with datatype #{inspect(literal.literal.datatype)} is not a valid #{
inspect(__MODULE__)
}"
end end
end end
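A usage sketch for generic literals (the datatype IRI below is a made-up example value):

lit = RDF.Literal.Generic.new("foo", datatype: "http://example.com/dt")
lit.literal.datatype
#=> "http://example.com/dt"
# new!/2 behaves the same but raises the ArgumentError above when the literal is invalid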
@ -88,15 +96,23 @@ defmodule RDF.Literal.Generic do
def do_equal_value_same_or_derived_datatypes?( def do_equal_value_same_or_derived_datatypes?(
%{datatype: datatype} = left, %{datatype: datatype} = left,
%{datatype: datatype} = right %{datatype: datatype} = right
), do: left == right ),
do: left == right
def do_equal_value_same_or_derived_datatypes?(_, _), do: nil def do_equal_value_same_or_derived_datatypes?(_, _), do: nil
@impl Datatype @impl Datatype
def do_compare(%__MODULE__{datatype: datatype} = left_literal, def do_compare(
%__MODULE__{datatype: datatype} = right_literal) do %__MODULE__{datatype: datatype} = left_literal,
%__MODULE__{datatype: datatype} = right_literal
) do
case {left_literal.value, right_literal.value} do case {left_literal.value, right_literal.value} do
{left_value, right_value} when left_value < right_value -> :lt {left_value, right_value} when left_value < right_value ->
{left_value, right_value} when left_value > right_value -> :gt :lt
{left_value, right_value} when left_value > right_value ->
:gt
_ -> _ ->
if equal_value?(left_literal, right_literal), do: :eq if equal_value?(left_literal, right_literal), do: :eq
end end
@ -107,6 +123,7 @@ defmodule RDF.Literal.Generic do
@impl Datatype @impl Datatype
def update(literal, fun, opts \\ []) def update(literal, fun, opts \\ [])
def update(%Literal{literal: literal}, fun, opts), do: update(literal, fun, opts) def update(%Literal{literal: literal}, fun, opts), do: update(literal, fun, opts)
def update(%__MODULE__{} = literal, fun, _opts) do def update(%__MODULE__{} = literal, fun, _opts) do
literal literal
|> value() |> value()

View File

@ -6,8 +6,8 @@ defmodule RDF.LangString do
defstruct [:value, :language] defstruct [:value, :language]
use RDF.Literal.Datatype, use RDF.Literal.Datatype,
name: "langString", name: "langString",
id: RDF.Utils.Bootstrapping.rdf_iri("langString") id: RDF.Utils.Bootstrapping.rdf_iri("langString")
import RDF.Utils.Guards import RDF.Utils.Guards
@ -15,18 +15,19 @@ defmodule RDF.LangString do
alias RDF.Literal alias RDF.Literal
@type t :: %__MODULE__{ @type t :: %__MODULE__{
value: String.t, value: String.t(),
language: String.t language: String.t()
} }
@doc """ @doc """
Creates a new `RDF.Literal` with this datatype and the given `value` and `language`. Creates a new `RDF.Literal` with this datatype and the given `value` and `language`.
""" """
@impl RDF.Literal.Datatype @impl RDF.Literal.Datatype
@spec new(any, String.t | atom | keyword) :: Literal.t @spec new(any, String.t() | atom | keyword) :: Literal.t()
def new(value, language_or_opts \\ []) def new(value, language_or_opts \\ [])
def new(value, language) when is_binary(language), do: new(value, language: language) def new(value, language) when is_binary(language), do: new(value, language: language)
def new(value, language) when is_ordinary_atom(language), do: new(value, language: language) def new(value, language) when is_ordinary_atom(language), do: new(value, language: language)
def new(value, opts) do def new(value, opts) do
%Literal{ %Literal{
literal: %__MODULE__{ literal: %__MODULE__{
@ -38,18 +39,24 @@ defmodule RDF.LangString do
defp normalize_language(nil), do: nil defp normalize_language(nil), do: nil
defp normalize_language(""), do: nil defp normalize_language(""), do: nil
defp normalize_language(language) when is_ordinary_atom(language), do: language |> to_string() |> normalize_language()
defp normalize_language(language) when is_ordinary_atom(language),
do: language |> to_string() |> normalize_language()
defp normalize_language(language), do: String.downcase(language) defp normalize_language(language), do: String.downcase(language)
@impl RDF.Literal.Datatype @impl RDF.Literal.Datatype
@spec new!(any, String.t | atom | keyword) :: Literal.t @spec new!(any, String.t() | atom | keyword) :: Literal.t()
def new!(value, language_or_opts \\ []) do def new!(value, language_or_opts \\ []) do
literal = new(value, language_or_opts) literal = new(value, language_or_opts)
if valid?(literal) do if valid?(literal) do
literal literal
else else
raise ArgumentError, "#{inspect(value)} with language #{inspect literal.literal.language} is not a valid #{inspect(__MODULE__)}" raise ArgumentError,
"#{inspect(value)} with language #{inspect(literal.literal.language)} is not a valid #{
inspect(__MODULE__)
}"
end end
end end
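A sketch of both constructors; the second call presumably raises, since a language-tagged string without a language cannot be valid:

RDF.LangString.new("foo", language: "en")   # returns a valid RDF.Literal
RDF.LangString.new!("foo", language: nil)   # raises ArgumentError (assumed invalid)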
@ -84,6 +91,7 @@ defmodule RDF.LangString do
@impl Datatype @impl Datatype
def update(literal, fun, opts \\ []) def update(literal, fun, opts \\ [])
def update(%Literal{literal: literal}, fun, opts), do: update(literal, fun, opts) def update(%Literal{literal: literal}, fun, opts), do: update(literal, fun, opts)
def update(%__MODULE__{} = literal, fun, _opts) do def update(%__MODULE__{} = literal, fun, _opts) do
literal literal
|> value() |> value()
@ -102,12 +110,14 @@ defmodule RDF.LangString do
see <https://www.w3.org/TR/sparql11-query/#func-langMatches> see <https://www.w3.org/TR/sparql11-query/#func-langMatches>
""" """
@spec match_language?(Literal.t | t() | String.t, String.t) :: boolean @spec match_language?(Literal.t() | t() | String.t(), String.t()) :: boolean
def match_language?(language_tag, language_range) def match_language?(language_tag, language_range)
def match_language?(%Literal{literal: literal}, language_range), def match_language?(%Literal{literal: literal}, language_range),
do: match_language?(literal, language_range) do: match_language?(literal, language_range)
def match_language?(%__MODULE__{language: nil}, _), do: false def match_language?(%__MODULE__{language: nil}, _), do: false
def match_language?(%__MODULE__{language: language_tag}, language_range), def match_language?(%__MODULE__{language: language_tag}, language_range),
do: match_language?(language_tag, language_range) do: match_language?(language_tag, language_range)
@ -121,7 +131,7 @@ defmodule RDF.LangString do
case String.split(language_tag, language_range, parts: 2) do case String.split(language_tag, language_range, parts: 2) do
[_, rest] -> rest == "" or String.starts_with?(rest, "-") [_, rest] -> rest == "" or String.starts_with?(rest, "-")
_ -> false _ -> false
end end
end end
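For illustration, with the basic language-range handling shown above (matching is case-insensitive and subtag-aware):

RDF.LangString.match_language?("de-DE", "de")    #=> true  ("-DE" is a subtag of the range)
RDF.LangString.match_language?("de-DE", "de-AT") #=> false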

View File

@ -14,14 +14,13 @@ defmodule RDF.Namespace do
@doc """ @doc """
Resolves a term to a `RDF.IRI`. Resolves a term to a `RDF.IRI`.
""" """
@callback __resolve_term__(atom) :: {:ok, IRI.t} | {:error, Exception.t} @callback __resolve_term__(atom) :: {:ok, IRI.t()} | {:error, Exception.t()}
@doc """ @doc """
All terms of a `RDF.Namespace`. All terms of a `RDF.Namespace`.
""" """
@callback __terms__() :: [atom] @callback __terms__() :: [atom]
@doc """ @doc """
Resolves a qualified term to a `RDF.IRI`. Resolves a qualified term to a `RDF.IRI`.
@ -29,7 +28,7 @@ defmodule RDF.Namespace do
delegates the remaining part of the term to `__resolve_term__/1` of this delegates the remaining part of the term to `__resolve_term__/1` of this
determined namespace. determined namespace.
""" """
@spec resolve_term(IRI.t | module) :: {:ok, IRI.t} | {:error, Exception.t} @spec resolve_term(IRI.t() | module) :: {:ok, IRI.t()} | {:error, Exception.t()}
def resolve_term(expr) def resolve_term(expr)
def resolve_term(%IRI{} = iri), do: {:ok, iri} def resolve_term(%IRI{} = iri), do: {:ok, iri}
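A sketch of term resolution, using the RDFS vocabulary namespace bundled with the library:

RDF.Namespace.resolve_term(RDF.NS.RDFS.Class)
#=> {:ok, ~I<http://www.w3.org/2000/01/rdf-schema#Class>}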
@ -45,7 +44,7 @@ defmodule RDF.Namespace do
See `resolve_term/1` for more. See `resolve_term/1` for more.
""" """
@spec resolve_term!(IRI.t | module) :: IRI.t @spec resolve_term!(IRI.t() | module) :: IRI.t()
def resolve_term!(expr) do def resolve_term!(expr) do
with {:ok, iri} <- resolve_term(expr) do with {:ok, iri} <- resolve_term(expr) do
iri iri
@ -57,7 +56,7 @@ defmodule RDF.Namespace do
defp do_resolve_term("Elixir." <> _ = namespaced_term) do defp do_resolve_term("Elixir." <> _ = namespaced_term) do
{term, namespace} = {term, namespace} =
namespaced_term namespaced_term
|> Module.split |> Module.split()
|> List.pop_at(-1) |> List.pop_at(-1)
do_resolve_term(Module.concat(namespace), String.to_atom(term)) do_resolve_term(Module.concat(namespace), String.to_atom(term))
@ -65,20 +64,18 @@ defmodule RDF.Namespace do
defp do_resolve_term(namespaced_term) do defp do_resolve_term(namespaced_term) do
{:error, {:error,
%RDF.Namespace.UndefinedTermError{ %RDF.Namespace.UndefinedTermError{
message: "#{namespaced_term} is not a term on a RDF.Namespace" message: "#{namespaced_term} is not a term on a RDF.Namespace"
} }}
}
end end
defp do_resolve_term(RDF, term), do: do_resolve_term(RDF.NS.RDF, term) defp do_resolve_term(RDF, term), do: do_resolve_term(RDF.NS.RDF, term)
defp do_resolve_term(Elixir, term) do defp do_resolve_term(Elixir, term) do
{:error, {:error,
%RDF.Namespace.UndefinedTermError{message: %RDF.Namespace.UndefinedTermError{
"#{term} is not a RDF.Namespace; top-level modules can't be RDF.Namespaces" message: "#{term} is not a RDF.Namespace; top-level modules can't be RDF.Namespaces"
} }}
}
end end
defp do_resolve_term(namespace, term) do defp do_resolve_term(namespace, term) do
@ -91,9 +88,7 @@ defmodule RDF.Namespace do
if is_module and Keyword.has_key?(namespace.__info__(:functions), :__resolve_term__) do if is_module and Keyword.has_key?(namespace.__info__(:functions), :__resolve_term__) do
namespace.__resolve_term__(term) namespace.__resolve_term__(term)
else else
{:error, {:error, %RDF.Namespace.UndefinedTermError{message: "#{namespace} is not a RDF.Namespace"}}
%RDF.Namespace.UndefinedTermError{message: "#{namespace} is not a RDF.Namespace"}
}
end end
end end
end end

View File

@ -26,7 +26,7 @@ defmodule RDF.NS do
base_iri: "http://www.w3.org/1999/02/22-rdf-syntax-ns#", base_iri: "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
file: "rdf.ttl", file: "rdf.ttl",
alias: [ alias: [
Nil: "nil", Nil: "nil",
LangString: "langString" LangString: "langString"
] ]

View File

@ -7,11 +7,11 @@ defmodule RDF.PrefixMap do
alias RDF.IRI alias RDF.IRI
@type prefix :: atom @type prefix :: atom
@type namespace :: IRI.t @type namespace :: IRI.t()
@type coercible_prefix :: atom | String.t @type coercible_prefix :: atom | String.t()
@type coercible_namespace :: atom | String.t | IRI.t @type coercible_namespace :: atom | String.t() | IRI.t()
@type prefix_map :: %{prefix => namespace} @type prefix_map :: %{prefix => namespace}
@ -20,11 +20,10 @@ defmodule RDF.PrefixMap do
@type t :: %__MODULE__{ @type t :: %__MODULE__{
map: prefix_map map: prefix_map
} }
defstruct map: %{} defstruct map: %{}
@doc """ @doc """
Creates an empty `RDF.PrefixMap`. Creates an empty `RDF.PrefixMap`.
""" """
@ -67,7 +66,7 @@ defmodule RDF.PrefixMap do
Unless a mapping of the given prefix to a different namespace already exists, Unless a mapping of the given prefix to a different namespace already exists,
an ok tuple is returned, otherwise an error tuple. an ok tuple is returned, otherwise an error tuple.
""" """
@spec add(t, coercible_prefix, coercible_namespace) :: {:ok, t} | {:error, String.t} @spec add(t, coercible_prefix, coercible_namespace) :: {:ok, t} | {:error, String.t()}
def add(prefix_map, prefix, namespace) def add(prefix_map, prefix, namespace)
def add(%__MODULE__{map: map}, prefix, %IRI{} = namespace) when is_atom(prefix) do def add(%__MODULE__{map: map}, prefix, %IRI{} = namespace) when is_atom(prefix) do
@ -109,7 +108,7 @@ defmodule RDF.PrefixMap do
See also `merge/3` which allows you to resolve conflicts with a function. See also `merge/3` which allows you to resolve conflicts with a function.
""" """
@spec merge(t, t | map | keyword) :: {:ok, t} | {:error, [atom | String.t]} @spec merge(t, t | map | keyword) :: {:ok, t} | {:error, [atom | String.t()]}
def merge(prefix_map1, prefix_map2) def merge(prefix_map1, prefix_map2)
def merge(%__MODULE__{map: map1}, %__MODULE__{map: map2}) do def merge(%__MODULE__{map: map1}, %__MODULE__{map: map2}) do
@ -149,7 +148,8 @@ defmodule RDF.PrefixMap do
If everything could be merged, an `:ok` tuple is returned. If everything could be merged, an `:ok` tuple is returned.
""" """
@spec merge(t, t | map | keyword, conflict_resolver | nil) :: {:ok, t} | {:error, [atom | String.t]} @spec merge(t, t | map | keyword, conflict_resolver | nil) ::
{:ok, t} | {:error, [atom | String.t()]}
def merge(prefix_map1, prefix_map2, conflict_resolver) def merge(prefix_map1, prefix_map2, conflict_resolver)
def merge(%__MODULE__{map: map1}, %__MODULE__{map: map2}, conflict_resolver) def merge(%__MODULE__{map: map1}, %__MODULE__{map: map2}, conflict_resolver)
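A usage sketch; the three-argument conflict-resolver signature (prefix, left namespace, right namespace), mirroring `Map.merge/3`, is an assumption here:

{:ok, pm} =
  RDF.PrefixMap.new()
  |> RDF.PrefixMap.add(:ex, "http://example.com/ns#")

RDF.PrefixMap.merge(pm, %{ex: "http://example.com/other#"}, fn _prefix, left, _right -> left end)
#=> {:ok, prefix map keeping the left namespace for :ex}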

View File

@ -8,14 +8,14 @@ defmodule RDF.Quad do
alias RDF.Statement alias RDF.Statement
@type t :: {Statement.subject, Statement.predicate, Statement.object, Statement.graph_name} @type t ::
{Statement.subject(), Statement.predicate(), Statement.object(), Statement.graph_name()}
@type coercible_t :: @type coercible_t ::
{Statement.coercible_subject, Statement.coercible_predicate, {Statement.coercible_subject(), Statement.coercible_predicate(),
Statement.coercible_object, Statement.coercible_graph_name} Statement.coercible_object(), Statement.coercible_graph_name()}
@type t_values :: {String.t, String.t, any, String.t}
@type t_values :: {String.t(), String.t(), any, String.t()}
@doc """ @doc """
Creates a `RDF.Quad` with proper RDF values. Creates a `RDF.Quad` with proper RDF values.
@ -32,10 +32,10 @@ defmodule RDF.Quad do
{RDF.iri("http://example.com/S"), RDF.iri("http://example.com/p"), RDF.literal(42), RDF.iri("http://example.com/Graph")} {RDF.iri("http://example.com/S"), RDF.iri("http://example.com/p"), RDF.literal(42), RDF.iri("http://example.com/Graph")}
""" """
@spec new( @spec new(
Statement.coercible_subject, Statement.coercible_subject(),
Statement.coercible_predicate, Statement.coercible_predicate(),
Statement.coercible_object, Statement.coercible_object(),
Statement.coercible_graph_name Statement.coercible_graph_name()
) :: t ) :: t
def new(subject, predicate, object, graph_context) do def new(subject, predicate, object, graph_context) do
{ {
@ -64,7 +64,6 @@ defmodule RDF.Quad do
def new({subject, predicate, object, graph_context}), def new({subject, predicate, object, graph_context}),
do: new(subject, predicate, object, graph_context) do: new(subject, predicate, object, graph_context)
@doc """ @doc """
Returns a tuple of native Elixir values from a `RDF.Quad` of RDF terms. Returns a tuple of native Elixir values from a `RDF.Quad` of RDF terms.
@ -94,15 +93,14 @@ defmodule RDF.Quad do
{:S, :p, 42, ~I<http://example.com/Graph>} {:S, :p, 42, ~I<http://example.com/Graph>}
""" """
@spec values(t | any, Statement.term_mapping) :: t_values | nil @spec values(t | any, Statement.term_mapping()) :: t_values | nil
def values(quad, mapping \\ &Statement.default_term_mapping/1) def values(quad, mapping \\ &Statement.default_term_mapping/1)
def values({subject, predicate, object, graph_context}, mapping) do def values({subject, predicate, object, graph_context}, mapping) do
with subject_value when not is_nil(subject_value) <- mapping.({:subject, subject}), with subject_value when not is_nil(subject_value) <- mapping.({:subject, subject}),
predicate_value when not is_nil(predicate_value) <- mapping.({:predicate, predicate}), predicate_value when not is_nil(predicate_value) <- mapping.({:predicate, predicate}),
object_value when not is_nil(object_value) <- mapping.({:object, object}), object_value when not is_nil(object_value) <- mapping.({:object, object}),
graph_context_value <- mapping.({:graph_name, graph_context}) graph_context_value <- mapping.({:graph_name, graph_context}) do
do
{subject_value, predicate_value, object_value, graph_context_value} {subject_value, predicate_value, object_value, graph_context_value}
else else
_ -> nil _ -> nil
@ -111,7 +109,6 @@ defmodule RDF.Quad do
def values(_, _), do: nil def values(_, _), do: nil

@doc """ @doc """
Checks if the given tuple is a valid RDF quad. Checks if the given tuple is a valid RDF quad.
@ -123,5 +120,4 @@ defmodule RDF.Quad do
def valid?(tuple) def valid?(tuple)
def valid?({_, _, _, _} = quad), do: Statement.valid?(quad) def valid?({_, _, _, _} = quad), do: Statement.valid?(quad)
def valid?(_), do: false def valid?(_), do: false
end end

View File

@ -84,7 +84,7 @@ defmodule RDF.Query do
As opposed to `execute/3` this returns the results directly or fails with an As opposed to `execute/3` this returns the results directly or fails with an
exception. exception.
""" """
def execute!(query, graph, opts \\ []) do def execute!(query, graph, opts \\ []) do
case execute(query, graph, opts) do case execute(query, graph, opts) do
{:ok, results} -> results {:ok, results} -> results
{:error, error} -> raise error {:error, error} -> raise error
@ -152,7 +152,7 @@ defmodule RDF.Query do
As opposed to `stream/3` this returns the stream directly or fails with an As opposed to `stream/3` this returns the stream directly or fails with an
exception. exception.
""" """
def stream!(query, graph, opts \\ []) do def stream!(query, graph, opts \\ []) do
case stream(query, graph, opts) do case stream(query, graph, opts) do
{:ok, results} -> results {:ok, results} -> results
{:error, error} -> raise error {:error, error} -> raise error
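A rough usage sketch; the tuple-based query form with `?`-suffixed variable atoms is assumed here for illustration:

graph = RDF.Graph.new({RDF.iri("http://example.com/S"), RDF.iri("http://example.com/p"), RDF.literal(42)})
RDF.Query.execute!({:s?, :p?, :o?}, graph)
#=> a list with one solution map binding :s, :p and :o
RDF.Query.stream!({:s?, :p?, :o?}, graph) |> Enum.take(1)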

View File

@ -9,13 +9,12 @@ defmodule RDF.Query.BGP do
@enforce_keys [:triple_patterns] @enforce_keys [:triple_patterns]
defstruct [:triple_patterns] defstruct [:triple_patterns]
@type variable :: String.t()
@type variable :: String.t
@type triple_pattern :: { @type triple_pattern :: {
subject :: variable | RDF.Term.t, subject :: variable | RDF.Term.t(),
predicate :: variable | RDF.Term.t, predicate :: variable | RDF.Term.t(),
object :: variable | RDF.Term.t object :: variable | RDF.Term.t()
} }
@type triple_patterns :: list(triple_pattern) @type triple_patterns :: list(triple_pattern)
@type t :: %__MODULE__{triple_patterns: triple_patterns} @type t :: %__MODULE__{triple_patterns: triple_patterns}

View File

@ -4,7 +4,11 @@ defmodule RDF.Query.BGP.BlankNodeHandler do
alias RDF.Query.BGP alias RDF.Query.BGP
alias RDF.BlankNode alias RDF.BlankNode
@default_remove_bnode_query_variables Application.get_env(:rdf, :default_remove_bnode_query_variables, true) @default_remove_bnode_query_variables Application.get_env(
:rdf,
:default_remove_bnode_query_variables,
true
)
def preprocess(triple_patterns) do def preprocess(triple_patterns) do
Enum.reduce(triple_patterns, {false, []}, fn Enum.reduce(triple_patterns, {false, []}, fn
@ -14,22 +18,30 @@ defmodule RDF.Query.BGP.BlankNodeHandler do
end) end)
end end
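The default above is read from the application environment at compile time, so it can be set globally in config or presumably overridden per call via the option read with `Keyword.get` below; a sketch:

# config/config.exs (after `import Config`) — keep bnode-derived variables in solutions
config :rdf, default_remove_bnode_query_variables: false

# or per call
RDF.Query.BGP.Simple.execute(bgp, graph, remove_bnode_query_variables: false)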
defp convert_blank_nodes({%BlankNode{} = s, %BlankNode{} = p, %BlankNode{} = o}), do: {true, {bnode_var(s), bnode_var(p), bnode_var(o)}} defp convert_blank_nodes({%BlankNode{} = s, %BlankNode{} = p, %BlankNode{} = o}),
defp convert_blank_nodes({s, %BlankNode{} = p, %BlankNode{} = o}), do: {true, {s, bnode_var(p), bnode_var(o)}} do: {true, {bnode_var(s), bnode_var(p), bnode_var(o)}}
defp convert_blank_nodes({%BlankNode{} = s, p, %BlankNode{} = o}), do: {true, {bnode_var(s), p, bnode_var(o)}}
defp convert_blank_nodes({%BlankNode{} = s, %BlankNode{} = p, o}), do: {true, {bnode_var(s), bnode_var(p), o}} defp convert_blank_nodes({s, %BlankNode{} = p, %BlankNode{} = o}),
do: {true, {s, bnode_var(p), bnode_var(o)}}
defp convert_blank_nodes({%BlankNode{} = s, p, %BlankNode{} = o}),
do: {true, {bnode_var(s), p, bnode_var(o)}}
defp convert_blank_nodes({%BlankNode{} = s, %BlankNode{} = p, o}),
do: {true, {bnode_var(s), bnode_var(p), o}}
defp convert_blank_nodes({%BlankNode{} = s, p, o}), do: {true, {bnode_var(s), p, o}} defp convert_blank_nodes({%BlankNode{} = s, p, o}), do: {true, {bnode_var(s), p, o}}
defp convert_blank_nodes({s, %BlankNode{} = p, o}), do: {true, {s, bnode_var(p), o}} defp convert_blank_nodes({s, %BlankNode{} = p, o}), do: {true, {s, bnode_var(p), o}}
defp convert_blank_nodes({s, p, %BlankNode{} = o}), do: {true, {s, p, bnode_var(o)}} defp convert_blank_nodes({s, p, %BlankNode{} = o}), do: {true, {s, p, bnode_var(o)}}
defp convert_blank_nodes(triple_pattern), do: {false, triple_pattern} defp convert_blank_nodes(triple_pattern), do: {false, triple_pattern}
defp bnode_var(bnode), do: bnode |> to_string() |> String.to_atom() defp bnode_var(bnode), do: bnode |> to_string() |> String.to_atom()
def postprocess(solutions, bgp, has_blank_nodes, opts) do def postprocess(solutions, bgp, has_blank_nodes, opts) do
if has_blank_nodes and if has_blank_nodes and
Keyword.get(opts, :remove_bnode_query_variables, @default_remove_bnode_query_variables) do Keyword.get(opts, :remove_bnode_query_variables, @default_remove_bnode_query_variables) do
bnode_vars = bgp |> bnodes() |> Enum.map(&bnode_var/1) bnode_vars = bgp |> bnodes() |> Enum.map(&bnode_var/1)
Enum.map(solutions, &(Map.drop(&1, bnode_vars))) Enum.map(solutions, &Map.drop(&1, bnode_vars))
else else
solutions solutions
end end
@ -45,7 +57,7 @@ defmodule RDF.Query.BGP.BlankNodeHandler do
defp bnodes({%BlankNode{} = s, %BlankNode{} = p, %BlankNode{} = o}), do: [s, p, o] defp bnodes({%BlankNode{} = s, %BlankNode{} = p, %BlankNode{} = o}), do: [s, p, o]
defp bnodes({%BlankNode{} = s, %BlankNode{} = p, _}), do: [s, p] defp bnodes({%BlankNode{} = s, %BlankNode{} = p, _}), do: [s, p]
defp bnodes({%BlankNode{} = s, _, %BlankNode{} = o}) , do: [s, o] defp bnodes({%BlankNode{} = s, _, %BlankNode{} = o}), do: [s, o]
defp bnodes({_, %BlankNode{} = p, %BlankNode{} = o}), do: [p, o] defp bnodes({_, %BlankNode{} = p, %BlankNode{} = o}), do: [p, o]
defp bnodes({%BlankNode{} = s, _, _}), do: [s] defp bnodes({%BlankNode{} = s, _, _}), do: [s]
defp bnodes({_, %BlankNode{} = p, _}), do: [p] defp bnodes({_, %BlankNode{} = p, _}), do: [p]

View File

@ -1,7 +1,7 @@
defmodule RDF.Query.BGP.Matcher do defmodule RDF.Query.BGP.Matcher do
@moduledoc !""" @moduledoc !"""
An interface for various BGP matching algorithm implementations. An interface for various BGP matching algorithm implementations.
""" """
alias RDF.Query.BGP alias RDF.Query.BGP
alias RDF.Graph alias RDF.Graph
@ -9,8 +9,7 @@ defmodule RDF.Query.BGP.Matcher do
@type solution :: map @type solution :: map
@type solutions :: [solution] @type solutions :: [solution]
@callback execute(BGP.t, Graph.t, opts :: Keyword.t) :: solutions @callback execute(BGP.t(), Graph.t(), opts :: Keyword.t()) :: solutions
@callback stream(BGP.t, Graph.t, opts :: Keyword.t) :: Enumerable.t()
@callback stream(BGP.t(), Graph.t(), opts :: Keyword.t()) :: Enumerable.t()
end end

View File

@ -14,28 +14,30 @@ defmodule RDF.Query.BGP.QueryPlanner do
query_plan( query_plan(
mark_solved_variables(rest, new_solved), mark_solved_variables(rest, new_solved),
new_solved, new_solved,
[next_best | plan]) [next_best | plan]
)
end end
defp triple_priority({v, v, v}), do: triple_priority({v, "p", "o"}) defp triple_priority({v, v, v}), do: triple_priority({v, "p", "o"})
defp triple_priority({v, v, o}), do: triple_priority({v, "p", o}) defp triple_priority({v, v, o}), do: triple_priority({v, "p", o})
defp triple_priority({v, p, v}), do: triple_priority({v, p, "o"}) defp triple_priority({v, p, v}), do: triple_priority({v, p, "o"})
defp triple_priority({s, v, v}), do: triple_priority({s, v, "o"}) defp triple_priority({s, v, v}), do: triple_priority({s, v, "o"})
defp triple_priority({s, p, o}) do defp triple_priority({s, p, o}) do
{sp, pp, op} = {value_priority(s), value_priority(p), value_priority(o)} {sp, pp, op} = {value_priority(s), value_priority(p), value_priority(o)}
<<(sp + pp + op) :: size(2), sp :: size(1), pp :: size(1), op :: size(1)>> <<sp + pp + op::size(2), sp::size(1), pp::size(1), op::size(1)>>
end end
defp value_priority(value) when is_atom(value), do: 1 defp value_priority(value) when is_atom(value), do: 1
defp value_priority(_), do: 0 defp value_priority(_), do: 0
defp mark_solved_variables(triple_patterns, solved) do defp mark_solved_variables(triple_patterns, solved) do
Enum.map triple_patterns, fn {s, p, o} -> Enum.map(triple_patterns, fn {s, p, o} ->
{ {
(if is_atom(s) and s in solved, do: {s}, else: s), if(is_atom(s) and s in solved, do: {s}, else: s),
(if is_atom(p) and p in solved, do: {p}, else: p), if(is_atom(p) and p in solved, do: {p}, else: p),
(if is_atom(o) and o in solved, do: {o}, else: o) if(is_atom(o) and o in solved, do: {o}, else: o)
} }
end end)
end end
end end
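In effect, patterns with the fewest unresolved variables are matched first: each variable position contributes a 1 bit, and the resulting bitstrings sort ascending. Roughly, using the private helpers above (EX.s/EX.p stand for bound RDF terms and are illustrative):

# triple_priority({EX.s, EX.p, :o})  #=> <<1::size(2), 0::size(1), 0::size(1), 1::size(1)>>
# triple_priority({:s, :p, :o})      #=> <<3::size(2), 1::size(1), 1::size(1), 1::size(1)>>
# the smaller bitstring sorts first, so the more constrained pattern runs first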

View File

@ -10,11 +10,11 @@ defmodule RDF.Query.BGP.Simple do
@impl RDF.Query.BGP.Matcher @impl RDF.Query.BGP.Matcher
def execute(bgp, graph, opts \\ []) def execute(bgp, graph, opts \\ [])
def execute(%BGP{triple_patterns: []}, _, _), do: [%{}] # https://www.w3.org/TR/sparql11-query/#emptyGroupPattern # https://www.w3.org/TR/sparql11-query/#emptyGroupPattern
def execute(%BGP{triple_patterns: []}, _, _), do: [%{}]
def execute(%BGP{triple_patterns: triple_patterns}, %Graph{} = graph, opts) do def execute(%BGP{triple_patterns: triple_patterns}, %Graph{} = graph, opts) do
{bnode_state, preprocessed_triple_patterns} = {bnode_state, preprocessed_triple_patterns} = BlankNodeHandler.preprocess(triple_patterns)
BlankNodeHandler.preprocess(triple_patterns)
preprocessed_triple_patterns preprocessed_triple_patterns
|> QueryPlanner.query_plan() |> QueryPlanner.query_plan()
@ -28,7 +28,6 @@ defmodule RDF.Query.BGP.Simple do
|> Stream.into([]) |> Stream.into([])
end end
defp do_execute([triple_pattern | remaining], graph) do defp do_execute([triple_pattern | remaining], graph) do
do_execute(remaining, graph, match(graph, triple_pattern)) do_execute(remaining, graph, match(graph, triple_pattern))
end end
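Per the SPARQL empty-group-pattern semantics referenced above, an empty BGP yields exactly one empty solution:

RDF.Query.BGP.Simple.execute(%RDF.Query.BGP{triple_patterns: []}, RDF.Graph.new())
#=> [%{}]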
@ -43,19 +42,18 @@ defmodule RDF.Query.BGP.Simple do
do_execute(remaining, graph, match_with_solutions(graph, triple_pattern, solutions)) do_execute(remaining, graph, match_with_solutions(graph, triple_pattern, solutions))
end end
defp match_with_solutions(graph, {s, p, o} = triple_pattern, existing_solutions) defp match_with_solutions(graph, {s, p, o} = triple_pattern, existing_solutions)
when is_tuple(s) or is_tuple(p) or is_tuple(o) do when is_tuple(s) or is_tuple(p) or is_tuple(o) do
triple_pattern triple_pattern
|> apply_solutions(existing_solutions) |> apply_solutions(existing_solutions)
|> Enum.flat_map(&(merging_match(&1, graph))) |> Enum.flat_map(&merging_match(&1, graph))
end end
defp match_with_solutions(graph, triple_pattern, existing_solutions) do defp match_with_solutions(graph, triple_pattern, existing_solutions) do
graph graph
|> match(triple_pattern) |> match(triple_pattern)
|> Enum.flat_map(fn solution -> |> Enum.flat_map(fn solution ->
Enum.map(existing_solutions, &(Map.merge(solution, &1))) Enum.map(existing_solutions, &Map.merge(solution, &1))
end) end)
end end
@ -63,14 +61,15 @@ defmodule RDF.Query.BGP.Simple do
apply_solution = apply_solution =
case triple_pattern do case triple_pattern do
{{s}, {p}, {o}} -> fn solution -> {solution, {solution[s], solution[p], solution[o]}} end {{s}, {p}, {o}} -> fn solution -> {solution, {solution[s], solution[p], solution[o]}} end
{{s}, {p}, o } -> fn solution -> {solution, {solution[s], solution[p], o}} end {{s}, {p}, o} -> fn solution -> {solution, {solution[s], solution[p], o}} end
{{s}, p , {o}} -> fn solution -> {solution, {solution[s], p , solution[o]}} end {{s}, p, {o}} -> fn solution -> {solution, {solution[s], p, solution[o]}} end
{{s}, p , o } -> fn solution -> {solution, {solution[s], p , o}} end {{s}, p, o} -> fn solution -> {solution, {solution[s], p, o}} end
{ s , {p}, {o}} -> fn solution -> {solution, {s , solution[p], solution[o]}} end {s, {p}, {o}} -> fn solution -> {solution, {s, solution[p], solution[o]}} end
{ s , {p} , o } -> fn solution -> {solution, {s , solution[p], o}} end {s, {p}, o} -> fn solution -> {solution, {s, solution[p], o}} end
{ s , p , {o}} -> fn solution -> {solution, {s , p , solution[o]}} end {s, p, {o}} -> fn solution -> {solution, {s, p, solution[o]}} end
_ -> nil _ -> nil
end end
if apply_solution do if apply_solution do
Stream.map(solutions, apply_solution) Stream.map(solutions, apply_solution)
else else
@ -80,7 +79,9 @@ defmodule RDF.Query.BGP.Simple do
defp merging_match({dependent_solution, triple_pattern}, graph) do defp merging_match({dependent_solution, triple_pattern}, graph) do
case match(graph, triple_pattern) do case match(graph, triple_pattern) do
nil -> [] nil ->
[]
solutions -> solutions ->
Enum.map(solutions, fn solution -> Enum.map(solutions, fn solution ->
Map.merge(dependent_solution, solution) Map.merge(dependent_solution, solution)
@ -88,12 +89,13 @@ defmodule RDF.Query.BGP.Simple do
end end
end end
defp match(%Graph{descriptions: descriptions}, {subject_variable, _, _} = triple_pattern) defp match(%Graph{descriptions: descriptions}, {subject_variable, _, _} = triple_pattern)
when is_atom(subject_variable) do when is_atom(subject_variable) do
Enum.reduce(descriptions, [], fn ({subject, description}, acc) -> Enum.reduce(descriptions, [], fn {subject, description}, acc ->
case match(description, solve_variables(subject_variable, subject, triple_pattern)) do case match(description, solve_variables(subject_variable, subject, triple_pattern)) do
nil -> acc nil ->
acc
solutions -> solutions ->
Enum.map(solutions, fn solution -> Enum.map(solutions, fn solution ->
Map.put(solution, subject_variable, subject) Map.put(solution, subject_variable, subject)
@ -104,14 +106,14 @@ defmodule RDF.Query.BGP.Simple do
defp match(%Graph{} = graph, {subject, _, _} = triple_pattern) do defp match(%Graph{} = graph, {subject, _, _} = triple_pattern) do
case graph[subject] do case graph[subject] do
nil -> [] nil -> []
description -> match(description, triple_pattern) description -> match(description, triple_pattern)
end end
end end
defp match(%Description{predications: predications}, {_, variable, variable}) defp match(%Description{predications: predications}, {_, variable, variable})
when is_atom(variable) do when is_atom(variable) do
Enum.reduce(predications, [], fn ({predicate, objects}, solutions) -> Enum.reduce(predications, [], fn {predicate, objects}, solutions ->
if Map.has_key?(objects, predicate) do if Map.has_key?(objects, predicate) do
[%{variable => predicate} | solutions] [%{variable => predicate} | solutions]
else else
@ -122,17 +124,20 @@ defmodule RDF.Query.BGP.Simple do
defp match(%Description{predications: predications}, {_, predicate_variable, object_variable}) defp match(%Description{predications: predications}, {_, predicate_variable, object_variable})
when is_atom(predicate_variable) and is_atom(object_variable) do when is_atom(predicate_variable) and is_atom(object_variable) do
Enum.reduce(predications, [], fn ({predicate, objects}, solutions) -> Enum.reduce(predications, [], fn {predicate, objects}, solutions ->
solutions ++ solutions ++
Enum.map(objects, fn {object, _} -> Enum.map(objects, fn {object, _} ->
%{predicate_variable => predicate, object_variable => object} %{predicate_variable => predicate, object_variable => object}
end) end)
end) end)
end end
defp match(%Description{predications: predications}, defp match(
{_, predicate_variable, object}) when is_atom(predicate_variable) do %Description{predications: predications},
Enum.reduce(predications, [], fn ({predicate, objects}, solutions) -> {_, predicate_variable, object}
)
when is_atom(predicate_variable) do
Enum.reduce(predications, [], fn {predicate, objects}, solutions ->
if Map.has_key?(objects, object) do if Map.has_key?(objects, object) do
[%{predicate_variable => predicate} | solutions] [%{predicate_variable => predicate} | solutions]
else else
@ -141,10 +146,14 @@ defmodule RDF.Query.BGP.Simple do
end) end)
end end
defp match(%Description{predications: predications}, defp match(
{_, predicate, object_or_variable}) do %Description{predications: predications},
{_, predicate, object_or_variable}
) do
case predications[predicate] do case predications[predicate] do
nil -> [] nil ->
[]
objects -> objects ->
cond do cond do
# object_or_variable is a variable # object_or_variable is a variable
@ -165,11 +174,11 @@ defmodule RDF.Query.BGP.Simple do
end end
defp solve_variables(var, val, {var, var, var}), do: {val, val, val} defp solve_variables(var, val, {var, var, var}), do: {val, val, val}
defp solve_variables(var, val, {s, var, var}), do: {s, val, val} defp solve_variables(var, val, {s, var, var}), do: {s, val, val}
defp solve_variables(var, val, {var, p, var}), do: {val, p, val} defp solve_variables(var, val, {var, p, var}), do: {val, p, val}
defp solve_variables(var, val, {var, var, o}), do: {val, val, o} defp solve_variables(var, val, {var, var, o}), do: {val, val, o}
defp solve_variables(var, val, {var, p, o}), do: {val, p, o} defp solve_variables(var, val, {var, p, o}), do: {val, p, o}
defp solve_variables(var, val, {s, var, o}), do: {s, val, o} defp solve_variables(var, val, {s, var, o}), do: {s, val, o}
defp solve_variables(var, val, {s, p, var}), do: {s, p, val} defp solve_variables(var, val, {s, p, var}), do: {s, p, val}
defp solve_variables(_, _, pattern), do: pattern defp solve_variables(_, _, pattern), do: pattern
end end

View File

@ -7,15 +7,14 @@ defmodule RDF.Query.BGP.Stream do
alias RDF.Query.BGP.{QueryPlanner, BlankNodeHandler} alias RDF.Query.BGP.{QueryPlanner, BlankNodeHandler}
alias RDF.{Graph, Description} alias RDF.{Graph, Description}
@impl RDF.Query.BGP.Matcher @impl RDF.Query.BGP.Matcher
def stream(bgp, graph, opts \\ []) def stream(bgp, graph, opts \\ [])
def stream(%BGP{triple_patterns: []}, _, _), do: to_stream([%{}]) # https://www.w3.org/TR/sparql11-query/#emptyGroupPattern # https://www.w3.org/TR/sparql11-query/#emptyGroupPattern
def stream(%BGP{triple_patterns: []}, _, _), do: to_stream([%{}])
def stream(%BGP{triple_patterns: triple_patterns}, %Graph{} = graph, opts) do def stream(%BGP{triple_patterns: triple_patterns}, %Graph{} = graph, opts) do
{bnode_state, preprocessed_triple_patterns} = {bnode_state, preprocessed_triple_patterns} = BlankNodeHandler.preprocess(triple_patterns)
BlankNodeHandler.preprocess(triple_patterns)
preprocessed_triple_patterns preprocessed_triple_patterns
|> QueryPlanner.query_plan() |> QueryPlanner.query_plan()
@ -47,18 +46,17 @@ defmodule RDF.Query.BGP.Stream do
do_execute(remaining, graph, match_with_solutions(graph, triple_pattern, solutions)) do_execute(remaining, graph, match_with_solutions(graph, triple_pattern, solutions))
end end
defp match_with_solutions(graph, {s, p, o} = triple_pattern, existing_solutions) defp match_with_solutions(graph, {s, p, o} = triple_pattern, existing_solutions)
when is_tuple(s) or is_tuple(p) or is_tuple(o) do when is_tuple(s) or is_tuple(p) or is_tuple(o) do
triple_pattern triple_pattern
|> apply_solutions(existing_solutions) |> apply_solutions(existing_solutions)
|> Stream.flat_map(&(merging_match(&1, graph))) |> Stream.flat_map(&merging_match(&1, graph))
end end
defp match_with_solutions(graph, triple_pattern, existing_solutions) do defp match_with_solutions(graph, triple_pattern, existing_solutions) do
if solutions = match(graph, triple_pattern) do if solutions = match(graph, triple_pattern) do
Stream.flat_map(solutions, fn solution -> Stream.flat_map(solutions, fn solution ->
Stream.map(existing_solutions, &(Map.merge(solution, &1))) Stream.map(existing_solutions, &Map.merge(solution, &1))
end) end)
end end
end end
@ -67,14 +65,15 @@ defmodule RDF.Query.BGP.Stream do
apply_solution = apply_solution =
case triple_pattern do case triple_pattern do
{{s}, {p}, {o}} -> fn solution -> {solution, {solution[s], solution[p], solution[o]}} end {{s}, {p}, {o}} -> fn solution -> {solution, {solution[s], solution[p], solution[o]}} end
{{s}, {p}, o } -> fn solution -> {solution, {solution[s], solution[p], o}} end {{s}, {p}, o} -> fn solution -> {solution, {solution[s], solution[p], o}} end
{{s}, p , {o}} -> fn solution -> {solution, {solution[s], p , solution[o]}} end {{s}, p, {o}} -> fn solution -> {solution, {solution[s], p, solution[o]}} end
{{s}, p , o } -> fn solution -> {solution, {solution[s], p , o}} end {{s}, p, o} -> fn solution -> {solution, {solution[s], p, o}} end
{ s , {p}, {o}} -> fn solution -> {solution, {s , solution[p], solution[o]}} end {s, {p}, {o}} -> fn solution -> {solution, {s, solution[p], solution[o]}} end
{ s , {p} , o } -> fn solution -> {solution, {s , solution[p], o}} end {s, {p}, o} -> fn solution -> {solution, {s, solution[p], o}} end
{ s , p , {o}} -> fn solution -> {solution, {s , p , solution[o]}} end {s, p, {o}} -> fn solution -> {solution, {s, p, solution[o]}} end
_ -> nil _ -> nil
end end
if apply_solution do if apply_solution do
Stream.map(solutions, apply_solution) Stream.map(solutions, apply_solution)
else else
@ -84,20 +83,23 @@ defmodule RDF.Query.BGP.Stream do
defp merging_match({dependent_solution, triple_pattern}, graph) do defp merging_match({dependent_solution, triple_pattern}, graph) do
case match(graph, triple_pattern) do case match(graph, triple_pattern) do
nil -> [] nil ->
[]
solutions -> solutions ->
Stream.map solutions, fn solution -> Stream.map(solutions, fn solution ->
Map.merge(dependent_solution, solution) Map.merge(dependent_solution, solution)
end end)
end end
end end
defp match(%Graph{descriptions: descriptions}, {subject_variable, _, _} = triple_pattern) defp match(%Graph{descriptions: descriptions}, {subject_variable, _, _} = triple_pattern)
when is_atom(subject_variable) do when is_atom(subject_variable) do
Stream.flat_map(descriptions, fn {subject, description} -> Stream.flat_map(descriptions, fn {subject, description} ->
case match(description, solve_variables(subject_variable, subject, triple_pattern)) do case match(description, solve_variables(subject_variable, subject, triple_pattern)) do
nil -> [] nil ->
[]
solutions -> solutions ->
Stream.map(solutions, fn solution -> Stream.map(solutions, fn solution ->
Map.put(solution, subject_variable, subject) Map.put(solution, subject_variable, subject)
@ -108,7 +110,7 @@ defmodule RDF.Query.BGP.Stream do
defp match(%Graph{} = graph, {subject, _, _} = triple_pattern) do defp match(%Graph{} = graph, {subject, _, _} = triple_pattern) do
case graph[subject] do case graph[subject] do
nil -> nil nil -> nil
description -> match(description, triple_pattern) description -> match(description, triple_pattern)
end end
end end
@ -132,20 +134,26 @@ defmodule RDF.Query.BGP.Stream do
end) end)
end end
defp match(%Description{predications: predications}, defp match(
{_, predicate_variable, object}) when is_atom(predicate_variable) do %Description{predications: predications},
matches = {_, predicate_variable, object}
Stream.filter(predications, fn {_, objects} -> Map.has_key?(objects, object) end) )
when is_atom(predicate_variable) do
matches = Stream.filter(predications, fn {_, objects} -> Map.has_key?(objects, object) end)
unless Enum.empty?(matches) do unless Enum.empty?(matches) do
Stream.map(matches, fn {predicate, _} -> %{predicate_variable => predicate} end) Stream.map(matches, fn {predicate, _} -> %{predicate_variable => predicate} end)
end end
end end
defp match(%Description{predications: predications}, defp match(
{_, predicate, object_or_variable}) do %Description{predications: predications},
{_, predicate, object_or_variable}
) do
case predications[predicate] do case predications[predicate] do
nil -> nil nil ->
nil
objects -> objects ->
cond do cond do
# object_or_variable is a variable # object_or_variable is a variable
@ -162,17 +170,17 @@ defmodule RDF.Query.BGP.Stream do
true -> true ->
nil nil
end end
end end
end end
defp solve_variables(var, val, {var, var, var}), do: {val, val, val} defp solve_variables(var, val, {var, var, var}), do: {val, val, val}
defp solve_variables(var, val, {s, var, var}), do: {s, val, val} defp solve_variables(var, val, {s, var, var}), do: {s, val, val}
defp solve_variables(var, val, {var, p, var}), do: {val, p, val} defp solve_variables(var, val, {var, p, var}), do: {val, p, val}
defp solve_variables(var, val, {var, var, o}), do: {val, val, o} defp solve_variables(var, val, {var, var, o}), do: {val, val, o}
defp solve_variables(var, val, {var, p, o}), do: {val, p, o} defp solve_variables(var, val, {var, p, o}), do: {val, p, o}
defp solve_variables(var, val, {s, var, o}), do: {s, val, o} defp solve_variables(var, val, {s, var, o}), do: {s, val, o}
defp solve_variables(var, val, {s, p, var}), do: {s, p, val} defp solve_variables(var, val, {s, p, var}), do: {s, p, val}
defp solve_variables(_, _, pattern), do: pattern defp solve_variables(_, _, pattern), do: pattern
defp to_stream(enum), do: Stream.into(enum, []) defp to_stream(enum), do: Stream.into(enum, [])
end end

View File

@ -32,7 +32,7 @@ defmodule RDF.Query.Builder do
end end
defp triple_patterns(triple_pattern) when is_tuple(triple_pattern), defp triple_patterns(triple_pattern) when is_tuple(triple_pattern),
do: triple_patterns([triple_pattern]) do: triple_patterns([triple_pattern])
defp triple_pattern({subject, predicate, object}) defp triple_pattern({subject, predicate, object})
when not is_list(predicate) and not is_list(object) do when not is_list(predicate) and not is_list(object) do
@ -43,7 +43,8 @@ defmodule RDF.Query.Builder do
end end
end end
defp triple_pattern(combined_objects_triple_pattern) when is_tuple(combined_objects_triple_pattern) do defp triple_pattern(combined_objects_triple_pattern)
when is_tuple(combined_objects_triple_pattern) do
[subject | rest] = Tuple.to_list(combined_objects_triple_pattern) [subject | rest] = Tuple.to_list(combined_objects_triple_pattern)
case rest do case rest do
@ -53,9 +54,10 @@ defmodule RDF.Query.Builder do
|> Enum.map(fn object -> {subject, predicate, object} end) |> Enum.map(fn object -> {subject, predicate, object} end)
|> triple_patterns() |> triple_patterns()
else else
{:error, %RDF.Query.InvalidError{ {:error,
message: "Invalid use of predicate-object pair brackets"} %RDF.Query.InvalidError{
} message: "Invalid use of predicate-object pair brackets"
}}
end end
predicate_object_pairs -> predicate_object_pairs ->
@ -66,9 +68,10 @@ defmodule RDF.Query.Builder do
end) end)
|> triple_patterns() |> triple_patterns()
else else
{:error, %RDF.Query.InvalidError{ {:error,
message: "Invalid use of predicate-object pair brackets"} %RDF.Query.InvalidError{
} message: "Invalid use of predicate-object pair brackets"
}}
end end
end end
end end
@ -79,9 +82,10 @@ defmodule RDF.Query.Builder do
if value do if value do
{:ok, value} {:ok, value}
else else
{:error, %RDF.Query.InvalidError{ {:error,
message: "Invalid subject term in BGP triple pattern: #{inspect subject}"} %RDF.Query.InvalidError{
} message: "Invalid subject term in BGP triple pattern: #{inspect(subject)}"
}}
end end
end end
@ -91,9 +95,10 @@ defmodule RDF.Query.Builder do
if value do if value do
{:ok, value} {:ok, value}
else else
{:error, %RDF.Query.InvalidError{ {:error,
message: "Invalid predicate term in BGP triple pattern: #{inspect predicate}"} %RDF.Query.InvalidError{
} message: "Invalid predicate term in BGP triple pattern: #{inspect(predicate)}"
}}
end end
end end
@ -103,9 +108,10 @@ defmodule RDF.Query.Builder do
if value do if value do
{:ok, value} {:ok, value}
else else
{:error, %RDF.Query.InvalidError{ {:error,
message: "Invalid object term in BGP triple pattern: #{inspect object}"} %RDF.Query.InvalidError{
} message: "Invalid object term in BGP triple pattern: #{inspect(object)}"
}}
end end
end end
@ -146,13 +152,13 @@ defmodule RDF.Query.Builder do
defp literal(%Literal{} = literal), do: literal defp literal(%Literal{} = literal), do: literal
defp literal(value), do: Literal.coerce(value) defp literal(value), do: Literal.coerce(value)
def path(query, opts \\ []) def path(query, opts \\ [])
def path(query, _) when is_list(query) and length(query) < 3 do def path(query, _) when is_list(query) and length(query) < 3 do
{:error, %RDF.Query.InvalidError{ {:error,
message: "Invalid path expression: must have at least three elements"} %RDF.Query.InvalidError{
} message: "Invalid path expression: must have at least three elements"
}}
end end
def path([subject | rest], opts) do def path([subject | rest], opts) do
@ -175,6 +181,12 @@ defmodule RDF.Query.Builder do
defp path_pattern(subject, [predicate | rest], triple_patterns, count, with_elements) do defp path_pattern(subject, [predicate | rest], triple_patterns, count, with_elements) do
object = if with_elements, do: :"el#{count}?", else: RDF.bnode(count) object = if with_elements, do: :"el#{count}?", else: RDF.bnode(count)
path_pattern(object, rest, [{subject, predicate, object} | triple_patterns], count + 1, with_elements) path_pattern(
object,
rest,
[{subject, predicate, object} | triple_patterns],
count + 1,
with_elements
)
end end
end end

View File

@ -11,7 +11,7 @@ defmodule RDF.Serialization.Decoder do
It returns an `{:ok, data}` tuple, with `data` being the deserialized graph or It returns an `{:ok, data}` tuple, with `data` being the deserialized graph or
dataset, or `{:error, reason}` if an error occurs. dataset, or `{:error, reason}` if an error occurs.
""" """
@callback decode(String.t, keyword | map) :: {:ok, Graph.t | Dataset.t} | {:error, any} @callback decode(String.t(), keyword | map) :: {:ok, Graph.t() | Dataset.t()} | {:error, any}
@doc """ @doc """
Decodes a serialized `RDF.Graph` or `RDF.Dataset` from the given string. Decodes a serialized `RDF.Graph` or `RDF.Dataset` from the given string.
@ -21,24 +21,22 @@ defmodule RDF.Serialization.Decoder do
Note: The `__using__` macro automatically provides an overridable default Note: The `__using__` macro automatically provides an overridable default
implementation based on the non-bang `decode` function. implementation based on the non-bang `decode` function.
""" """
@callback decode!(String.t, keyword | map) :: RDF.Graph.t | RDF.Dataset.t @callback decode!(String.t(), keyword | map) :: RDF.Graph.t() | RDF.Dataset.t()
defmacro __using__(_) do defmacro __using__(_) do
quote bind_quoted: [], unquote: true do quote bind_quoted: [], unquote: true do
@behaviour unquote(__MODULE__) @behaviour unquote(__MODULE__)
@impl unquote(__MODULE__) @impl unquote(__MODULE__)
@spec decode!(String.t, keyword | map) :: RDF.Graph.t | RDF.Dataset.t @spec decode!(String.t(), keyword | map) :: RDF.Graph.t() | RDF.Dataset.t()
def decode!(content, opts \\ []) do def decode!(content, opts \\ []) do
case decode(content, opts) do case decode(content, opts) do
{:ok, data} -> data {:ok, data} -> data
{:error, reason} -> raise reason {:error, reason} -> raise reason
end end
end end
defoverridable [decode!: 2] defoverridable decode!: 2
end end
end end
end end

View File

@ -12,7 +12,7 @@ defmodule RDF.Serialization.Encoder do
It returns an `{:ok, string}` tuple, with `string` being the serialized It returns an `{:ok, string}` tuple, with `string` being the serialized
`RDF.Graph` or `RDF.Dataset`, or `{:error, reason}` if an error occurs. `RDF.Graph` or `RDF.Dataset`, or `{:error, reason}` if an error occurs.
""" """
@callback encode(Graph.t | Dataset.t, keyword | map) :: {:ok, String.t} | {:error, any} @callback encode(Graph.t() | Dataset.t(), keyword | map) :: {:ok, String.t()} | {:error, any}
@doc """ @doc """
Encodes a `RDF.Graph` or `RDF.Dataset`. Encodes a `RDF.Graph` or `RDF.Dataset`.
@ -22,8 +22,7 @@ defmodule RDF.Serialization.Encoder do
Note: The `__using__` macro automatically provides an overridable default Note: The `__using__` macro automatically provides an overridable default
implementation based on the non-bang `encode` function. implementation based on the non-bang `encode` function.
""" """
@callback encode!(Graph.t | Dataset.t, keyword | map) :: String.t @callback encode!(Graph.t() | Dataset.t(), keyword | map) :: String.t()
defmacro __using__(_) do defmacro __using__(_) do
quote bind_quoted: [], unquote: true do quote bind_quoted: [], unquote: true do
@ -31,17 +30,16 @@ defmodule RDF.Serialization.Encoder do
@impl unquote(__MODULE__) @impl unquote(__MODULE__)
@dialyzer {:nowarn_function, encode!: 2} @dialyzer {:nowarn_function, encode!: 2}
@spec encode!(Graph.t | Dataset.t, keyword) :: String.t @spec encode!(Graph.t() | Dataset.t(), keyword) :: String.t()
def encode!(data, opts \\ []) do def encode!(data, opts \\ []) do
case encode(data, opts) do case encode(data, opts) do
{:ok, data} -> data {:ok, data} -> data
{:error, reason} -> raise reason {:error, reason} -> raise reason
end end
end end
defoverridable [encode!: 1] defoverridable encode!: 1
defoverridable [encode!: 2] defoverridable encode!: 2
end end
end end
end end

View File

@ -33,7 +33,7 @@ defmodule RDF.Serialization.Format do
@doc """ @doc """
An IRI of the serialization format. An IRI of the serialization format.
""" """
@callback id :: RDF.IRI.t @callback id :: RDF.IRI.t()
@doc """ @doc """
A name atom of the serialization format. A name atom of the serialization format.
@ -43,12 +43,12 @@ defmodule RDF.Serialization.Format do
@doc """ @doc """
The usual file extension for the serialization format. The usual file extension for the serialization format.
""" """
@callback extension :: String.t @callback extension :: String.t()
@doc """ @doc """
The MIME type of the serialization format. The MIME type of the serialization format.
""" """
@callback media_type :: String.t @callback media_type :: String.t()
@doc """ @doc """
A map with the supported options of the `Encoder` and `Decoder` for the serialization format. A map with the supported options of the `Encoder` and `Decoder` for the serialization format.
@ -65,7 +65,6 @@ defmodule RDF.Serialization.Format do
""" """
@callback encoder :: module @callback encoder :: module
defmacro __using__(_) do defmacro __using__(_) do
quote bind_quoted: [], unquote: true do quote bind_quoted: [], unquote: true do
@behaviour unquote(__MODULE__) @behaviour unquote(__MODULE__)
@ -82,31 +81,37 @@ defmodule RDF.Serialization.Format do
@impl unquote(__MODULE__) @impl unquote(__MODULE__)
def options, do: %{} def options, do: %{}
defoverridable [decoder: 0, encoder: 0, options: 0] defoverridable decoder: 0, encoder: 0, options: 0
@spec read_string(String.t, keyword) :: {:ok, Graph.t | Dataset.t} | {:error, any} @spec read_string(String.t(), keyword) :: {:ok, Graph.t() | Dataset.t()} | {:error, any}
def read_string(content, opts \\ []), def read_string(content, opts \\ []),
do: RDF.Serialization.Reader.read_string(decoder(), content, opts) do: RDF.Serialization.Reader.read_string(decoder(), content, opts)
@spec read_string!(String.t, keyword) :: Graph.t | Dataset.t
@spec read_string!(String.t(), keyword) :: Graph.t() | Dataset.t()
def read_string!(content, opts \\ []), def read_string!(content, opts \\ []),
do: RDF.Serialization.Reader.read_string!(decoder(), content, opts) do: RDF.Serialization.Reader.read_string!(decoder(), content, opts)
@spec read_file(Path.t, keyword) :: {:ok, Graph.t | Dataset.t} | {:error, any}
@spec read_file(Path.t(), keyword) :: {:ok, Graph.t() | Dataset.t()} | {:error, any}
def read_file(file, opts \\ []), def read_file(file, opts \\ []),
do: RDF.Serialization.Reader.read_file(decoder(), file, opts) do: RDF.Serialization.Reader.read_file(decoder(), file, opts)
@spec read_file!(Path.t, keyword) :: Graph.t | Dataset.t
@spec read_file!(Path.t(), keyword) :: Graph.t() | Dataset.t()
def read_file!(file, opts \\ []), def read_file!(file, opts \\ []),
do: RDF.Serialization.Reader.read_file!(decoder(), file, opts) do: RDF.Serialization.Reader.read_file!(decoder(), file, opts)
@spec write_string(Graph.t | Dataset.t, keyword) :: {:ok, String.t} | {:error, any} @spec write_string(Graph.t() | Dataset.t(), keyword) :: {:ok, String.t()} | {:error, any}
def write_string(data, opts \\ []), def write_string(data, opts \\ []),
do: RDF.Serialization.Writer.write_string(encoder(), data, opts) do: RDF.Serialization.Writer.write_string(encoder(), data, opts)
@spec write_string!(Graph.t | Dataset.t, keyword) :: String.t
@spec write_string!(Graph.t() | Dataset.t(), keyword) :: String.t()
def write_string!(data, opts \\ []), def write_string!(data, opts \\ []),
do: RDF.Serialization.Writer.write_string!(encoder(), data, opts) do: RDF.Serialization.Writer.write_string!(encoder(), data, opts)
@spec write_file(Graph.t | Dataset.t, Path.t, keyword) :: :ok | {:error, any}
@spec write_file(Graph.t() | Dataset.t(), Path.t(), keyword) :: :ok | {:error, any}
def write_file(data, path, opts \\ []), def write_file(data, path, opts \\ []),
do: RDF.Serialization.Writer.write_file(encoder(), data, path, opts) do: RDF.Serialization.Writer.write_file(encoder(), data, path, opts)
@spec write_file!(Graph.t | Dataset.t, Path.t, keyword) :: :ok
@spec write_file!(Graph.t() | Dataset.t(), Path.t(), keyword) :: :ok
def write_file!(data, path, opts \\ []), def write_file!(data, path, opts \\ []),
do: RDF.Serialization.Writer.write_file!(encoder(), data, path, opts) do: RDF.Serialization.Writer.write_file!(encoder(), data, path, opts)
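Every format module that `use`s this behaviour gets these helpers; for example, with the bundled Turtle format (a sketch):

{:ok, graph} = RDF.Turtle.read_string("<http://example.com/S> <http://example.com/p> <http://example.com/O> .")
RDF.Turtle.write_string!(graph)
RDF.Turtle.write_file!(graph, "out.ttl")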
@ -117,26 +122,28 @@ defmodule RDF.Serialization.Format do
defmacro __before_compile__(_env) do defmacro __before_compile__(_env) do
quote do quote do
if !Module.defines?(__MODULE__, {:id, 0}) && if !Module.defines?(__MODULE__, {:id, 0}) &&
Module.get_attribute(__MODULE__, :id) do Module.get_attribute(__MODULE__, :id) do
@impl unquote(__MODULE__) @impl unquote(__MODULE__)
def id, do: @id def id, do: @id
end end
if !Module.defines?(__MODULE__, {:name, 0}) && if !Module.defines?(__MODULE__, {:name, 0}) &&
Module.get_attribute(__MODULE__, :name) do Module.get_attribute(__MODULE__, :name) do
@impl unquote(__MODULE__) @impl unquote(__MODULE__)
def name, do: @name def name, do: @name
end end
if !Module.defines?(__MODULE__, {:extension, 0}) && if !Module.defines?(__MODULE__, {:extension, 0}) &&
Module.get_attribute(__MODULE__, :extension) do Module.get_attribute(__MODULE__, :extension) do
@impl unquote(__MODULE__) @impl unquote(__MODULE__)
def extension, do: @extension def extension, do: @extension
end end
if !Module.defines?(__MODULE__, {:media_type, 0}) && if !Module.defines?(__MODULE__, {:media_type, 0}) &&
Module.get_attribute(__MODULE__, :media_type) do Module.get_attribute(__MODULE__, :media_type) do
@impl unquote(__MODULE__) @impl unquote(__MODULE__)
def media_type, do: @media_type def media_type, do: @media_type
end end
end end
end end
end end

View File

@ -6,7 +6,6 @@ defmodule RDF.Serialization.ParseHelper do
@rdf_type RDF.Utils.Bootstrapping.rdf_iri("type") @rdf_type RDF.Utils.Bootstrapping.rdf_iri("type")
def rdf_type, do: @rdf_type def rdf_type, do: @rdf_type
def to_iri_string({:iriref, _line, value}), do: value |> iri_unescape def to_iri_string({:iriref, _line, value}), do: value |> iri_unescape
def to_iri({:iriref, line, value}) do def to_iri({:iriref, line, value}) do
@ -29,65 +28,69 @@ defmodule RDF.Serialization.ParseHelper do
end end
end end
def to_bnode({:blank_node_label, _line, value}), do: RDF.bnode(value) def to_bnode({:blank_node_label, _line, value}), do: RDF.bnode(value)
def to_bnode({:anon, _line}), do: RDF.bnode def to_bnode({:anon, _line}), do: RDF.bnode()
def to_literal({:string_literal_quote, _line, value}), def to_literal({:string_literal_quote, _line, value}),
do: value |> string_unescape |> RDF.literal do: value |> string_unescape |> RDF.literal()
def to_literal({:integer, _line, value}), do: RDF.literal(value) def to_literal({:integer, _line, value}), do: RDF.literal(value)
def to_literal({:decimal, _line, value}), do: RDF.literal(value) def to_literal({:decimal, _line, value}), do: RDF.literal(value)
def to_literal({:double, _line, value}), do: RDF.literal(value) def to_literal({:double, _line, value}), do: RDF.literal(value)
def to_literal({:boolean, _line, value}), do: RDF.literal(value) def to_literal({:boolean, _line, value}), do: RDF.literal(value)
def to_literal({:string_literal_quote, _line, value}, {:language, language}), def to_literal({:string_literal_quote, _line, value}, {:language, language}),
do: value |> string_unescape |> RDF.literal(language: language) do: value |> string_unescape |> RDF.literal(language: language)
def to_literal({:string_literal_quote, _line, value}, {:datatype, %IRI{} = type}), def to_literal({:string_literal_quote, _line, value}, {:datatype, %IRI{} = type}),
do: value |> string_unescape |> RDF.literal(datatype: type) do: value |> string_unescape |> RDF.literal(datatype: type)
def to_literal(string_literal_quote_ast, type), def to_literal(string_literal_quote_ast, type),
do: {string_literal_quote_ast, type} do: {string_literal_quote_ast, type}
def integer(value), do: RDF.XSD.Integer.new(List.to_string(value)) def integer(value), do: RDF.XSD.Integer.new(List.to_string(value))
def decimal(value), do: RDF.XSD.Decimal.new(List.to_string(value)) def decimal(value), do: RDF.XSD.Decimal.new(List.to_string(value))
def double(value), do: RDF.XSD.Double.new(List.to_string(value)) def double(value), do: RDF.XSD.Double.new(List.to_string(value))
def boolean('true'), do: true def boolean('true'), do: true
def boolean('false'), do: false def boolean('false'), do: false
def to_langtag({:langtag, _line, value}), do: value def to_langtag({:langtag, _line, value}), do: value
def to_langtag({:"@prefix", 1}), do: "prefix" def to_langtag({:"@prefix", 1}), do: "prefix"
def to_langtag({:"@base", 1}), do: "base" def to_langtag({:"@base", 1}), do: "base"
def bnode_str('_:' ++ value), do: List.to_string(value) def bnode_str('_:' ++ value), do: List.to_string(value)
def langtag_str('@' ++ value), do: List.to_string(value) def langtag_str('@' ++ value), do: List.to_string(value)
def quoted_content_str(value), do: value |> List.to_string |> String.slice(1..-2) def quoted_content_str(value), do: value |> List.to_string() |> String.slice(1..-2)
def long_quoted_content_str(value), do: value |> List.to_string |> String.slice(3..-4) def long_quoted_content_str(value), do: value |> List.to_string() |> String.slice(3..-4)
def prefix_ns(value), do: value |> List.to_string |> String.slice(0..-2) def prefix_ns(value), do: value |> List.to_string() |> String.slice(0..-2)
def prefix_ln(value), do: value |> List.to_string |> String.split(":", parts: 2) |> List.to_tuple
def prefix_ln(value),
do: value |> List.to_string() |> String.split(":", parts: 2) |> List.to_tuple()
def string_unescape(string), def string_unescape(string),
do: string |> unescape_8digit_unicode_seq |> Macro.unescape_string(&string_unescape_map(&1)) do: string |> unescape_8digit_unicode_seq |> Macro.unescape_string(&string_unescape_map(&1))
def iri_unescape(string), def iri_unescape(string),
do: string |> unescape_8digit_unicode_seq |> Macro.unescape_string(&iri_unescape_map(&1)) do: string |> unescape_8digit_unicode_seq |> Macro.unescape_string(&iri_unescape_map(&1))
defp string_unescape_map(?b), do: ?\b defp string_unescape_map(?b), do: ?\b
defp string_unescape_map(?f), do: ?\f defp string_unescape_map(?f), do: ?\f
defp string_unescape_map(?n), do: ?\n defp string_unescape_map(?n), do: ?\n
defp string_unescape_map(?r), do: ?\r defp string_unescape_map(?r), do: ?\r
defp string_unescape_map(?t), do: ?\t defp string_unescape_map(?t), do: ?\t
defp string_unescape_map(?u), do: true defp string_unescape_map(?u), do: true
defp string_unescape_map(:unicode), do: true defp string_unescape_map(:unicode), do: true
defp string_unescape_map(e), do: e defp string_unescape_map(e), do: e
defp iri_unescape_map(?u), do: true defp iri_unescape_map(?u), do: true
defp iri_unescape_map(:unicode), do: true defp iri_unescape_map(:unicode), do: true
defp iri_unescape_map(e), do: e defp iri_unescape_map(e), do: e
def unescape_8digit_unicode_seq(string) do def unescape_8digit_unicode_seq(string) do
String.replace(string, ~r/\\U([0-9]|[A-F]|[a-f]){2}(([0-9]|[A-F]|[a-f]){6})/, "\\u{\\2}") String.replace(string, ~r/\\U([0-9]|[A-F]|[a-f]){2}(([0-9]|[A-F]|[a-f]){6})/, "\\u{\\2}")
end end
def error_description(error_descriptor) when is_list(error_descriptor) do def error_description(error_descriptor) when is_list(error_descriptor) do
error_descriptor error_descriptor
|> Stream.map(&to_string/1) |> Stream.map(&to_string/1)
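A worked example of the \U escape handling above (a sketch, with ParseHelper aliased for brevity): the 8-digit form is rewritten into Elixir's \u{...} form so that Macro.unescape_string can resolve it afterwards.

alias RDF.Serialization.ParseHelper

ParseHelper.unescape_8digit_unicode_seq(~S(\U0001F600))
#=> ~S(\u{01F600})

# string_unescape/1 chains both steps and yields the actual codepoint:
ParseHelper.string_unescape(~S(\U0001F600))
#=> "😀"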
View File
@ -15,7 +15,7 @@ defmodule RDF.Serialization.Reader do
It returns an `{:ok, data}` tuple, with `data` being the deserialized graph or It returns an `{:ok, data}` tuple, with `data` being the deserialized graph or
dataset, or `{:error, reason}` if an error occurs. dataset, or `{:error, reason}` if an error occurs.
""" """
@spec read_string(module, String.t, keyword) :: {:ok, Graph.t | Dataset.t} | {:error, any} @spec read_string(module, String.t(), keyword) :: {:ok, Graph.t() | Dataset.t()} | {:error, any}
def read_string(decoder, content, opts \\ []) do def read_string(decoder, content, opts \\ []) do
decoder.decode(content, opts) decoder.decode(content, opts)
end end
@ -25,7 +25,7 @@ defmodule RDF.Serialization.Reader do
As opposed to `read_string`, it raises an exception if an error occurs. As opposed to `read_string`, it raises an exception if an error occurs.
""" """
@spec read_string!(module, String.t, keyword) :: Graph.t | Dataset.t @spec read_string!(module, String.t(), keyword) :: Graph.t() | Dataset.t()
def read_string!(decoder, content, opts \\ []) do def read_string!(decoder, content, opts \\ []) do
decoder.decode!(content, opts) decoder.decode!(content, opts)
end end
@ -36,10 +36,10 @@ defmodule RDF.Serialization.Reader do
It returns an `{:ok, data}` tuple, with `data` being the deserialized graph or It returns an `{:ok, data}` tuple, with `data` being the deserialized graph or
dataset, or `{:error, reason}` if an error occurs. dataset, or `{:error, reason}` if an error occurs.
""" """
@spec read_file(module, Path.t, keyword) :: {:ok, Graph.t | Dataset.t} | {:error, any} @spec read_file(module, Path.t(), keyword) :: {:ok, Graph.t() | Dataset.t()} | {:error, any}
def read_file(decoder, file, opts \\ []) do def read_file(decoder, file, opts \\ []) do
case File.read(file) do case File.read(file) do
{:ok, content} -> read_string(decoder, content, opts) {:ok, content} -> read_string(decoder, content, opts)
{:error, reason} -> {:error, reason} {:error, reason} -> {:error, reason}
end end
end end
@ -49,7 +49,7 @@ defmodule RDF.Serialization.Reader do
As opposed to `read_file`, it raises an exception if an error occurs. As opposed to `read_file`, it raises an exception if an error occurs.
""" """
@spec read_file!(module, Path.t, keyword) :: Graph.t | Dataset.t @spec read_file!(module, Path.t(), keyword) :: Graph.t() | Dataset.t()
def read_file!(decoder, file, opts \\ []) do def read_file!(decoder, file, opts \\ []) do
with content = File.read!(file) do with content = File.read!(file) do
read_string!(decoder, content, opts) read_string!(decoder, content, opts)
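A usage sketch for the reader functions above, assuming the N-Triples decoder defined later in this diff and a hypothetical file "data.nt":

alias RDF.Serialization.Reader

{:ok, graph} = Reader.read_file(RDF.NTriples.Decoder, "data.nt")
graph = Reader.read_file!(RDF.NTriples.Decoder, "data.nt")   # raises instead of returning {:error, _}

{:ok, graph} =
  Reader.read_string(RDF.NTriples.Decoder, "<http://example.com/S> <http://example.com/p> <http://example.com/O> .")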
View File
@ -11,7 +11,7 @@ defmodule RDF.Serialization do
RDF.Turtle, RDF.Turtle,
JSON.LD, JSON.LD,
RDF.NTriples, RDF.NTriples,
RDF.NQuads, RDF.NQuads
] ]
@doc """ @doc """
@ -43,7 +43,7 @@ defmodule RDF.Serialization do
""" """
@spec available_formats :: [format] @spec available_formats :: [format]
def available_formats do def available_formats do
Enum.filter @formats, &Code.ensure_loaded?/1 Enum.filter(@formats, &Code.ensure_loaded?/1)
end end
@doc """ @doc """
@ -58,12 +58,12 @@ defmodule RDF.Serialization do
iex> RDF.Serialization.format(:jsonld) iex> RDF.Serialization.format(:jsonld)
nil # unless json_ld is defined as a dependency of the application nil # unless json_ld is defined as a dependency of the application
""" """
@spec format(String.t | atom) :: format | nil @spec format(String.t() | atom) :: format | nil
def format(name) def format(name)
def format(name) when is_binary(name) do def format(name) when is_binary(name) do
name name
|> String.to_existing_atom |> String.to_existing_atom()
|> format() |> format()
rescue rescue
ArgumentError -> nil ArgumentError -> nil
@ -73,7 +73,6 @@ defmodule RDF.Serialization do
format_where(fn format -> format.name == name end) format_where(fn format -> format.name == name end)
end end
@doc """ @doc """
Returns the `RDF.Serialization.Format` with the given media type, if available. Returns the `RDF.Serialization.Format` with the given media type, if available.
@ -84,7 +83,7 @@ defmodule RDF.Serialization do
iex> RDF.Serialization.format_by_media_type("application/ld+json") iex> RDF.Serialization.format_by_media_type("application/ld+json")
nil # unless json_ld is defined as a dependency of the application nil # unless json_ld is defined as a dependency of the application
""" """
@spec format_by_media_type(String.t) :: format | nil @spec format_by_media_type(String.t()) :: format | nil
def format_by_media_type(media_type) do def format_by_media_type(media_type) do
format_where(fn format -> format.media_type == media_type end) format_where(fn format -> format.media_type == media_type end)
end end
@ -101,7 +100,7 @@ defmodule RDF.Serialization do
iex> RDF.Serialization.format_by_extension("jsonld") iex> RDF.Serialization.format_by_extension("jsonld")
nil # unless json_ld is defined as a dependency of the application nil # unless json_ld is defined as a dependency of the application
""" """
@spec format_by_extension(String.t) :: format | nil @spec format_by_extension(String.t()) :: format | nil
def format_by_extension(extension) def format_by_extension(extension)
def format_by_extension("." <> extension), do: format_by_extension(extension) def format_by_extension("." <> extension), do: format_by_extension(extension)
@ -116,7 +115,6 @@ defmodule RDF.Serialization do
|> Enum.find(fun) |> Enum.find(fun)
end end
@doc """ @doc """
Reads and decodes a serialized graph or dataset from a string. Reads and decodes a serialized graph or dataset from a string.
@ -126,7 +124,7 @@ defmodule RDF.Serialization do
It returns an `{:ok, data}` tuple, with `data` being the deserialized graph or It returns an `{:ok, data}` tuple, with `data` being the deserialized graph or
dataset, or `{:error, reason}` if an error occurs. dataset, or `{:error, reason}` if an error occurs.
""" """
@spec read_string(String.t, keyword) :: {:ok, Graph.t | Dataset.t} | {:error, any} @spec read_string(String.t(), keyword) :: {:ok, Graph.t() | Dataset.t()} | {:error, any}
def read_string(content, opts) do def read_string(content, opts) do
with {:ok, format} <- string_format(opts) do with {:ok, format} <- string_format(opts) do
format.read_string(content, opts) format.read_string(content, opts)
@ -141,7 +139,7 @@ defmodule RDF.Serialization do
As opposed to `read_string`, it raises an exception if an error occurs. As opposed to `read_string`, it raises an exception if an error occurs.
""" """
@spec read_string!(String.t, keyword) :: Graph.t | Dataset.t @spec read_string!(String.t(), keyword) :: Graph.t() | Dataset.t()
def read_string!(content, opts) do def read_string!(content, opts) do
with {:ok, format} <- string_format(opts) do with {:ok, format} <- string_format(opts) do
format.read_string!(content, opts) format.read_string!(content, opts)
@ -160,7 +158,7 @@ defmodule RDF.Serialization do
It returns an `{:ok, data}` tuple, with `data` being the deserialized graph or It returns an `{:ok, data}` tuple, with `data` being the deserialized graph or
dataset, or `{:error, reason}` if an error occurs. dataset, or `{:error, reason}` if an error occurs.
""" """
@spec read_file(Path.t, keyword) :: {:ok, Graph.t | Dataset.t} | {:error, any} @spec read_file(Path.t(), keyword) :: {:ok, Graph.t() | Dataset.t()} | {:error, any}
def read_file(file, opts \\ []) do def read_file(file, opts \\ []) do
with {:ok, format} <- file_format(file, opts) do with {:ok, format} <- file_format(file, opts) do
format.read_file(file, opts) format.read_file(file, opts)
@ -176,7 +174,7 @@ defmodule RDF.Serialization do
As opposed to `read_file`, it raises an exception if an error occurs. As opposed to `read_file`, it raises an exception if an error occurs.
""" """
@spec read_file!(Path.t, keyword) :: Graph.t | Dataset.t @spec read_file!(Path.t(), keyword) :: Graph.t() | Dataset.t()
def read_file!(file, opts \\ []) do def read_file!(file, opts \\ []) do
with {:ok, format} <- file_format(file, opts) do with {:ok, format} <- file_format(file, opts) do
format.read_file!(file, opts) format.read_file!(file, opts)
@ -194,7 +192,7 @@ defmodule RDF.Serialization do
It returns an `{:ok, string}` tuple, with `string` being the serialized graph or It returns an `{:ok, string}` tuple, with `string` being the serialized graph or
dataset, or `{:error, reason}` if an error occurs. dataset, or `{:error, reason}` if an error occurs.
""" """
@spec write_string(Graph.t | Dataset.t, keyword) :: {:ok, String.t} | {:error, any} @spec write_string(Graph.t() | Dataset.t(), keyword) :: {:ok, String.t()} | {:error, any}
def write_string(data, opts) do def write_string(data, opts) do
with {:ok, format} <- string_format(opts) do with {:ok, format} <- string_format(opts) do
format.write_string(data, opts) format.write_string(data, opts)
@ -209,7 +207,7 @@ defmodule RDF.Serialization do
As opposed to `write_string`, it raises an exception if an error occurs. As opposed to `write_string`, it raises an exception if an error occurs.
""" """
@spec write_string!(Graph.t | Dataset.t, keyword) :: String.t @spec write_string!(Graph.t() | Dataset.t(), keyword) :: String.t()
def write_string!(data, opts) do def write_string!(data, opts) do
with {:ok, format} <- string_format(opts) do with {:ok, format} <- string_format(opts) do
format.write_string!(data, opts) format.write_string!(data, opts)
@ -234,7 +232,7 @@ defmodule RDF.Serialization do
It returns `:ok` if successful or `{:error, reason}` if an error occurs. It returns `:ok` if successful or `{:error, reason}` if an error occurs.
""" """
@spec write_file(Graph.t | Dataset.t, Path.t, keyword) :: :ok | {:error, any} @spec write_file(Graph.t() | Dataset.t(), Path.t(), keyword) :: :ok | {:error, any}
def write_file(data, path, opts \\ []) do def write_file(data, path, opts \\ []) do
with {:ok, format} <- file_format(path, opts) do with {:ok, format} <- file_format(path, opts) do
format.write_file(data, path, opts) format.write_file(data, path, opts)
@ -252,7 +250,7 @@ defmodule RDF.Serialization do
As opposed to `write_file`, it raises an exception if an error occurs. As opposed to `write_file`, it raises an exception if an error occurs.
""" """
@spec write_file!(Graph.t | Dataset.t, Path.t, keyword) :: :ok @spec write_file!(Graph.t() | Dataset.t(), Path.t(), keyword) :: :ok
def write_file!(data, path, opts \\ []) do def write_file!(data, path, opts \\ []) do
with {:ok, format} <- file_format(path, opts) do with {:ok, format} <- file_format(path, opts) do
format.write_file!(data, path, opts) format.write_file!(data, path, opts)
@ -261,12 +259,10 @@ defmodule RDF.Serialization do
end end
end end
defp string_format(opts) do defp string_format(opts) do
if format = if format =
(opts |> Keyword.get(:format) |> format()) || opts |> Keyword.get(:format) |> format() ||
(opts |> Keyword.get(:media_type) |> format_by_media_type()) opts |> Keyword.get(:media_type) |> format_by_media_type() do
do
{:ok, format} {:ok, format}
else else
{:error, "unable to detect serialization format"} {:error, "unable to detect serialization format"}
@ -276,7 +272,7 @@ defmodule RDF.Serialization do
defp file_format(filename, opts) do defp file_format(filename, opts) do
case string_format(opts) do case string_format(opts) do
{:ok, format} -> {:ok, format} {:ok, format} -> {:ok, format}
_ -> format_by_file_name(filename) _ -> format_by_file_name(filename)
end end
end end
@ -287,5 +283,4 @@ defmodule RDF.Serialization do
{:error, "unable to detect serialization format"} {:error, "unable to detect serialization format"}
end end
end end
end end
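To illustrate the lookup and detection helpers above (the doctests already note that the JSON-LD lookups return nil unless json_ld is a dependency; the file name below is hypothetical):

RDF.Serialization.format(:turtle)                              #=> RDF.Turtle
RDF.Serialization.format_by_extension("ttl")                   #=> RDF.Turtle
RDF.Serialization.format_by_media_type("application/n-quads")  #=> RDF.NQuads

# string_format/1 resolves the format from the opts, file_format/2 falls
# back to the file name, so both calls end up using the Turtle format:
{:ok, graph} = RDF.Serialization.read_file("data.ttl")

{:ok, graph} =
  RDF.Serialization.read_string(
    "<http://example.com/S> <http://example.com/p> <http://example.com/O> .",
    format: :turtle
  )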
View File
@ -15,7 +15,8 @@ defmodule RDF.Serialization.Writer do
It returns an `{:ok, string}` tuple, with `string` being the serialized graph or It returns an `{:ok, string}` tuple, with `string` being the serialized graph or
dataset, or `{:error, reason}` if an error occurs. dataset, or `{:error, reason}` if an error occurs.
""" """
@spec write_string(module, Graph.t | Dataset.t, keyword) :: {:ok, String.t} | {:error, any} @spec write_string(module, Graph.t() | Dataset.t(), keyword) ::
{:ok, String.t()} | {:error, any}
def write_string(encoder, data, opts \\ []) do def write_string(encoder, data, opts \\ []) do
encoder.encode(data, opts) encoder.encode(data, opts)
end end
@ -25,7 +26,7 @@ defmodule RDF.Serialization.Writer do
As opposed to `write_string`, it raises an exception if an error occurs. As opposed to `write_string`, it raises an exception if an error occurs.
""" """
@spec write_string!(module, Graph.t | Dataset.t, keyword) :: String.t @spec write_string!(module, Graph.t() | Dataset.t(), keyword) :: String.t()
def write_string!(encoder, data, opts \\ []) do def write_string!(encoder, data, opts \\ []) do
encoder.encode!(data, opts) encoder.encode!(data, opts)
end end
@ -42,7 +43,7 @@ defmodule RDF.Serialization.Writer do
It returns `:ok` if successful or `{:error, reason}` if an error occurs. It returns `:ok` if successful or `{:error, reason}` if an error occurs.
""" """
@spec write_file(module, Graph.t | Dataset.t, Path.t, keyword) :: :ok | {:error, any} @spec write_file(module, Graph.t() | Dataset.t(), Path.t(), keyword) :: :ok | {:error, any}
def write_file(encoder, data, path, opts \\ []) do def write_file(encoder, data, path, opts \\ []) do
with {:ok, encoded_string} <- write_string(encoder, data, opts) do with {:ok, encoded_string} <- write_string(encoder, data, opts) do
File.write(path, encoded_string, file_mode(encoder, opts)) File.write(path, encoded_string, file_mode(encoder, opts))
@ -56,7 +57,7 @@ defmodule RDF.Serialization.Writer do
As opposed to `write_file`, it raises an exception if an error occurs. As opposed to `write_file`, it raises an exception if an error occurs.
""" """
@spec write_file!(module, Graph.t | Dataset.t, Path.t, keyword) :: :ok @spec write_file!(module, Graph.t() | Dataset.t(), Path.t(), keyword) :: :ok
def write_file!(encoder, data, path, opts \\ []) do def write_file!(encoder, data, path, opts \\ []) do
with encoded_string = write_string!(encoder, data, opts) do with encoded_string = write_string!(encoder, data, opts) do
File.write!(path, encoded_string, file_mode(encoder, opts)) File.write!(path, encoded_string, file_mode(encoder, opts))
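The writer side mirrors the reader sketch above; a minimal example, assuming the N-Triples encoder from later in this diff and a hypothetical output path:

alias RDF.Serialization.Writer

graph =
  RDF.Graph.new()
  |> RDF.Graph.add({RDF.iri("http://example.com/S"), RDF.iri("http://example.com/p"), RDF.literal("foo")})

{:ok, ntriples} = Writer.write_string(RDF.NTriples.Encoder, graph)
:ok = Writer.write_file(RDF.NTriples.Encoder, graph, "out.nt")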
View File
@ -17,9 +17,8 @@ defmodule RDF.NQuads do
import RDF.Sigils import RDF.Sigils
@id ~I<http://www.w3.org/ns/formats/N-Quads> @id ~I<http://www.w3.org/ns/formats/N-Quads>
@name :nquads @name :nquads
@extension "nq" @extension "nq"
@media_type "application/n-quads" @media_type "application/n-quads"
end end
View File
@ -8,26 +8,29 @@ defmodule RDF.NQuads.Decoder do
alias RDF.{Dataset, Graph} alias RDF.{Dataset, Graph}
@impl RDF.Serialization.Decoder @impl RDF.Serialization.Decoder
@spec decode(String.t, keyword | map) :: {:ok, Graph.t | Dataset.t} | {:error, any} @spec decode(String.t(), keyword | map) :: {:ok, Graph.t() | Dataset.t()} | {:error, any}
def decode(content, _opts \\ []) do def decode(content, _opts \\ []) do
with {:ok, tokens, _} <- tokenize(content), with {:ok, tokens, _} <- tokenize(content),
{:ok, ast} <- parse(tokens) do {:ok, ast} <- parse(tokens) do
{:ok, build_dataset(ast)} {:ok, build_dataset(ast)}
else else
{:error, {error_line, :ntriples_lexer, error_descriptor}, _error_line_again} -> {:error, {error_line, :ntriples_lexer, error_descriptor}, _error_line_again} ->
{:error, "N-Quad scanner error on line #{error_line}: #{error_description error_descriptor}"} {:error,
"N-Quad scanner error on line #{error_line}: #{error_description(error_descriptor)}"}
{:error, {error_line, :nquads_parser, error_descriptor}} -> {:error, {error_line, :nquads_parser, error_descriptor}} ->
{:error, "N-Quad parser error on line #{error_line}: #{error_description error_descriptor}"} {:error,
"N-Quad parser error on line #{error_line}: #{error_description(error_descriptor)}"}
end end
end end
defp tokenize(content), do: content |> to_charlist |> :ntriples_lexer.string defp tokenize(content), do: content |> to_charlist |> :ntriples_lexer.string()
defp parse(tokens), do: tokens |> :nquads_parser.parse defp parse(tokens), do: tokens |> :nquads_parser.parse()
defp build_dataset(ast) do defp build_dataset(ast) do
Enum.reduce ast, RDF.Dataset.new, fn(quad, dataset) -> Enum.reduce(ast, RDF.Dataset.new(), fn quad, dataset ->
RDF.Dataset.add(dataset, quad) RDF.Dataset.add(dataset, quad)
end end)
end end
end end
View File
@ -6,31 +6,32 @@ defmodule RDF.NQuads.Encoder do
alias RDF.{Dataset, Graph, Quad, Statement, Triple} alias RDF.{Dataset, Graph, Quad, Statement, Triple}
@impl RDF.Serialization.Encoder @impl RDF.Serialization.Encoder
@callback encode(Graph.t | Dataset.t, keyword | map) :: {:ok, String.t} | {:error, any} @callback encode(Graph.t() | Dataset.t(), keyword | map) :: {:ok, String.t()} | {:error, any}
def encode(data, _opts \\ []) do def encode(data, _opts \\ []) do
result = result =
data data
|> Enum.reduce([], fn (statement, result) -> [statement(statement) | result] end) |> Enum.reduce([], fn statement, result -> [statement(statement) | result] end)
|> Enum.reverse |> Enum.reverse()
|> Enum.join("\n") |> Enum.join("\n")
{:ok, (if result == "", do: result, else: "#{result}\n")}
{:ok, if(result == "", do: result, else: "#{result}\n")}
end end
@spec statement({Statement.subject, Statement.predicate, Statement.object, nil}) :: String.t @spec statement({Statement.subject(), Statement.predicate(), Statement.object(), nil}) ::
String.t()
def statement({subject, predicate, object, nil}) do def statement({subject, predicate, object, nil}) do
statement({subject, predicate, object}) statement({subject, predicate, object})
end end
@spec statement(Quad.t) :: String.t @spec statement(Quad.t()) :: String.t()
def statement({subject, predicate, object, graph}) do def statement({subject, predicate, object, graph}) do
"#{term(subject)} #{term(predicate)} #{term(object)} #{term(graph)} ." "#{term(subject)} #{term(predicate)} #{term(object)} #{term(graph)} ."
end end
@spec statement(Triple.t) :: String.t @spec statement(Triple.t()) :: String.t()
def statement({subject, predicate, object}) do def statement({subject, predicate, object}) do
"#{term(subject)} #{term(predicate)} #{term(object)} ." "#{term(subject)} #{term(predicate)} #{term(object)} ."
end end
defdelegate term(value), to: RDF.NTriples.Encoder defdelegate term(value), to: RDF.NTriples.Encoder
end end
View File
@ -19,9 +19,8 @@ defmodule RDF.NTriples do
import RDF.Sigils import RDF.Sigils
@id ~I<http://www.w3.org/ns/formats/N-Triples> @id ~I<http://www.w3.org/ns/formats/N-Triples>
@name :ntriples @name :ntriples
@extension "nt" @extension "nt"
@media_type "application/n-triples" @media_type "application/n-triples"
end end
View File
@ -8,26 +8,29 @@ defmodule RDF.NTriples.Decoder do
alias RDF.{Dataset, Graph} alias RDF.{Dataset, Graph}
@impl RDF.Serialization.Decoder @impl RDF.Serialization.Decoder
@spec decode(String.t, keyword | map) :: {:ok, Graph.t | Dataset.t} | {:error, any} @spec decode(String.t(), keyword | map) :: {:ok, Graph.t() | Dataset.t()} | {:error, any}
def decode(content, _opts \\ []) do def decode(content, _opts \\ []) do
with {:ok, tokens, _} <- tokenize(content), with {:ok, tokens, _} <- tokenize(content),
{:ok, ast} <- parse(tokens) do {:ok, ast} <- parse(tokens) do
{:ok, build_graph(ast)} {:ok, build_graph(ast)}
else else
{:error, {error_line, :ntriples_lexer, error_descriptor}, _error_line_again} -> {:error, {error_line, :ntriples_lexer, error_descriptor}, _error_line_again} ->
{:error, "N-Triple scanner error on line #{error_line}: #{error_description error_descriptor}"} {:error,
"N-Triple scanner error on line #{error_line}: #{error_description(error_descriptor)}"}
{:error, {error_line, :ntriples_parser, error_descriptor}} -> {:error, {error_line, :ntriples_parser, error_descriptor}} ->
{:error, "N-Triple parser error on line #{error_line}: #{error_description error_descriptor}"} {:error,
"N-Triple parser error on line #{error_line}: #{error_description(error_descriptor)}"}
end end
end end
defp tokenize(content), do: content |> to_charlist |> :ntriples_lexer.string defp tokenize(content), do: content |> to_charlist |> :ntriples_lexer.string()
defp parse(tokens), do: tokens |> :ntriples_parser.parse defp parse(tokens), do: tokens |> :ntriples_parser.parse()
defp build_graph(ast) do defp build_graph(ast) do
Enum.reduce ast, RDF.Graph.new, fn(triple, graph) -> Enum.reduce(ast, RDF.Graph.new(), fn triple, graph ->
RDF.Graph.add(graph, triple) RDF.Graph.add(graph, triple)
end end)
end end
end end
View File
@ -6,24 +6,25 @@ defmodule RDF.NTriples.Encoder do
alias RDF.{BlankNode, Dataset, Graph, IRI, XSD, Literal, Statement, Triple, LangString} alias RDF.{BlankNode, Dataset, Graph, IRI, XSD, Literal, Statement, Triple, LangString}
@impl RDF.Serialization.Encoder @impl RDF.Serialization.Encoder
@callback encode(Graph.t | Dataset.t, keyword | map) :: {:ok, String.t} | {:error, any} @callback encode(Graph.t() | Dataset.t(), keyword | map) :: {:ok, String.t()} | {:error, any}
def encode(data, _opts \\ []) do def encode(data, _opts \\ []) do
result = result =
data data
|> Enum.reduce([], fn (statement, result) -> |> Enum.reduce([], fn statement, result ->
[statement(statement) | result] [statement(statement) | result]
end) end)
|> Enum.reverse |> Enum.reverse()
|> Enum.join("\n") |> Enum.join("\n")
{:ok, (if result == "", do: result, else: result <> "\n")}
{:ok, if(result == "", do: result, else: result <> "\n")}
end end
@spec statement(Triple.t) :: String.t @spec statement(Triple.t()) :: String.t()
def statement({subject, predicate, object}) do def statement({subject, predicate, object}) do
"#{term(subject)} #{term(predicate)} #{term(object)} ." "#{term(subject)} #{term(predicate)} #{term(object)} ."
end end
@spec term(Statement.subject | Statement.predicate | Statement.object) :: String.t @spec term(Statement.subject() | Statement.predicate() | Statement.object()) :: String.t()
def term(%IRI{} = iri) do def term(%IRI{} = iri) do
"<#{to_string(iri)}>" "<#{to_string(iri)}>"
end end
@ -43,5 +44,4 @@ defmodule RDF.NTriples.Encoder do
def term(%BlankNode{} = bnode) do def term(%BlankNode{} = bnode) do
to_string(bnode) to_string(bnode)
end end
end end
View File
@ -10,9 +10,8 @@ defmodule RDF.Turtle do
import RDF.Sigils import RDF.Sigils
@id ~I<http://www.w3.org/ns/formats/Turtle> @id ~I<http://www.w3.org/ns/formats/Turtle>
@name :turtle @name :turtle
@extension "ttl" @extension "ttl"
@media_type "text/turtle" @media_type "text/turtle"
end end
View File
@ -19,13 +19,12 @@ defmodule RDF.Turtle.Decoder do
end end
def next_bnode(%State{bnode_counter: bnode_counter} = state) do def next_bnode(%State{bnode_counter: bnode_counter} = state) do
{RDF.bnode("b#{bnode_counter}"), {RDF.bnode("b#{bnode_counter}"), %State{state | bnode_counter: bnode_counter + 1}}
%State{state | bnode_counter: bnode_counter + 1}}
end end
end end
@impl RDF.Serialization.Decoder @impl RDF.Serialization.Decoder
@spec decode(String.t, keyword | map) :: {:ok, Graph.t | Dataset.t} | {:error, any} @spec decode(String.t(), keyword | map) :: {:ok, Graph.t() | Dataset.t()} | {:error, any}
def decode(content, opts \\ %{}) def decode(content, opts \\ %{})
def decode(content, opts) when is_list(opts), def decode(content, opts) when is_list(opts),
@ -33,25 +32,28 @@ defmodule RDF.Turtle.Decoder do
def decode(content, opts) do def decode(content, opts) do
with {:ok, tokens, _} <- tokenize(content), with {:ok, tokens, _} <- tokenize(content),
{:ok, ast} <- parse(tokens), {:ok, ast} <- parse(tokens),
base_iri = Map.get(opts, :base, Map.get(opts, :base_iri, RDF.default_base_iri())) do base_iri = Map.get(opts, :base, Map.get(opts, :base_iri, RDF.default_base_iri())) do
build_graph(ast, base_iri && RDF.iri(base_iri)) build_graph(ast, base_iri && RDF.iri(base_iri))
else else
{:error, {error_line, :turtle_lexer, error_descriptor}, _error_line_again} -> {:error, {error_line, :turtle_lexer, error_descriptor}, _error_line_again} ->
{:error, "Turtle scanner error on line #{error_line}: #{error_description error_descriptor}"} {:error,
"Turtle scanner error on line #{error_line}: #{error_description(error_descriptor)}"}
{:error, {error_line, :turtle_parser, error_descriptor}} -> {:error, {error_line, :turtle_parser, error_descriptor}} ->
{:error, "Turtle parser error on line #{error_line}: #{error_description error_descriptor}"} {:error,
"Turtle parser error on line #{error_line}: #{error_description(error_descriptor)}"}
end end
end end
def tokenize(content), do: content |> to_charlist |> :turtle_lexer.string def tokenize(content), do: content |> to_charlist |> :turtle_lexer.string()
def parse([]), do: {:ok, []} def parse([]), do: {:ok, []}
def parse(tokens), do: tokens |> :turtle_parser.parse def parse(tokens), do: tokens |> :turtle_parser.parse()
defp build_graph(ast, base_iri) do defp build_graph(ast, base_iri) do
{graph, %State{namespaces: namespaces, base_iri: base_iri}} = {graph, %State{namespaces: namespaces, base_iri: base_iri}} =
Enum.reduce ast, {RDF.Graph.new, %State{base_iri: base_iri}}, fn Enum.reduce(ast, {RDF.Graph.new(), %State{base_iri: base_iri}}, fn
{:triples, triples_ast}, {graph, state} -> {:triples, triples_ast}, {graph, state} ->
with {statements, state} = triples(triples_ast, state) do with {statements, state} = triples(triples_ast, state) do
{RDF.Graph.add(graph, statements), state} {RDF.Graph.add(graph, statements), state}
@ -59,16 +61,15 @@ defmodule RDF.Turtle.Decoder do
{:directive, directive_ast}, {graph, state} -> {:directive, directive_ast}, {graph, state} ->
{graph, directive(directive_ast, state)} {graph, directive(directive_ast, state)}
end end)
{:ok, {:ok,
if Enum.empty?(namespaces) do if Enum.empty?(namespaces) do
graph graph
else else
RDF.Graph.add_prefixes(graph, namespaces) RDF.Graph.add_prefixes(graph, namespaces)
end end
|> RDF.Graph.set_base_iri(base_iri) |> RDF.Graph.set_base_iri(base_iri)}
}
rescue rescue
error -> {:error, Exception.message(error)} error -> {:error, Exception.message(error)}
end end
@ -87,16 +88,17 @@ defmodule RDF.Turtle.Decoder do
cond do cond do
IRI.absolute?(iri) -> IRI.absolute?(iri) ->
%State{state | base_iri: RDF.iri(iri)} %State{state | base_iri: RDF.iri(iri)}
base_iri != nil -> base_iri != nil ->
with absolute_iri = IRI.absolute(iri, base_iri) do with absolute_iri = IRI.absolute(iri, base_iri) do
%State{state | base_iri: absolute_iri} %State{state | base_iri: absolute_iri}
end end
true -> true ->
raise "Could not resolve relative IRI '#{iri}', no base iri provided" raise "Could not resolve relative IRI '#{iri}', no base iri provided"
end end
end end
defp triples({:blankNodePropertyList, _} = ast, state) do defp triples({:blankNodePropertyList, _} = ast, state) do
with {_, statements, state} = resolve_node(ast, [], state) do with {_, statements, state} = resolve_node(ast, [], state) do
{statements, state} {statements, state}
@ -105,15 +107,16 @@ defmodule RDF.Turtle.Decoder do
defp triples({subject, predications}, state) do defp triples({subject, predications}, state) do
with {subject, statements, state} = resolve_node(subject, [], state) do with {subject, statements, state} = resolve_node(subject, [], state) do
Enum.reduce predications, {statements, state}, fn {predicate, objects}, {statements, state} -> Enum.reduce(predications, {statements, state}, fn {predicate, objects},
{statements, state} ->
with {predicate, statements, state} = resolve_node(predicate, statements, state) do with {predicate, statements, state} = resolve_node(predicate, statements, state) do
Enum.reduce objects, {statements, state}, fn object, {statements, state} -> Enum.reduce(objects, {statements, state}, fn object, {statements, state} ->
with {object, statements, state} = resolve_node(object, statements, state) do with {object, statements, state} = resolve_node(object, statements, state) do
{[{subject, predicate, object} | statements], state} {[{subject, predicate, object} | statements], state}
end end
end end)
end end
end end)
end end
end end
@ -121,7 +124,7 @@ defmodule RDF.Turtle.Decoder do
if ns = State.ns(state, prefix) do if ns = State.ns(state, prefix) do
{RDF.iri(ns <> local_name_unescape(name)), statements, state} {RDF.iri(ns <> local_name_unescape(name)), statements, state}
else else
raise "line #{line_number}: undefined prefix #{inspect prefix}" raise "line #{line_number}: undefined prefix #{inspect(prefix)}"
end end
end end
@ -129,7 +132,7 @@ defmodule RDF.Turtle.Decoder do
if ns = State.ns(state, prefix) do if ns = State.ns(state, prefix) do
{RDF.iri(ns), statements, state} {RDF.iri(ns), statements, state}
else else
raise "line #{line_number}: undefined prefix #{inspect prefix}" raise "line #{line_number}: undefined prefix #{inspect(prefix)}"
end end
end end
@ -154,35 +157,43 @@ defmodule RDF.Turtle.Decoder do
end end
end end
defp resolve_node({{:string_literal_quote, _line, value}, {:datatype, datatype}}, statements, state) do defp resolve_node(
{{:string_literal_quote, _line, value}, {:datatype, datatype}},
statements,
state
) do
with {datatype, statements, state} = resolve_node(datatype, statements, state) do with {datatype, statements, state} = resolve_node(datatype, statements, state) do
{RDF.literal(value, datatype: datatype), statements, state} {RDF.literal(value, datatype: datatype), statements, state}
end end
end end
defp resolve_node({:collection, []}, statements, state) do defp resolve_node({:collection, []}, statements, state) do
{RDF.nil, statements, state} {RDF.nil(), statements, state}
end end
defp resolve_node({:collection, elements}, statements, state) do defp resolve_node({:collection, elements}, statements, state) do
with {first_list_node, state} = State.next_bnode(state), with {first_list_node, state} = State.next_bnode(state),
[first_element | rest_elements] = elements, [first_element | rest_elements] = elements,
{first_element_node, statements, state} = {first_element_node, statements, state} = resolve_node(first_element, statements, state),
resolve_node(first_element, statements, state), first_statement = [{first_list_node, RDF.first(), first_element_node}] do
first_statement = [{first_list_node, RDF.first, first_element_node}] do
{last_list_node, statements, state} = {last_list_node, statements, state} =
Enum.reduce rest_elements, {first_list_node, statements ++ first_statement, state}, Enum.reduce(
rest_elements,
{first_list_node, statements ++ first_statement, state},
fn element, {list_node, statements, state} -> fn element, {list_node, statements, state} ->
with {element_node, statements, state} = with {element_node, statements, state} = resolve_node(element, statements, state),
resolve_node(element, statements, state),
{next_list_node, state} = State.next_bnode(state) do {next_list_node, state} = State.next_bnode(state) do
{next_list_node, statements ++ [ {next_list_node,
{list_node, RDF.rest, next_list_node}, statements ++
{next_list_node, RDF.first, element_node}, [
], state} {list_node, RDF.rest(), next_list_node},
{next_list_node, RDF.first(), element_node}
], state}
end end
end end
{first_list_node, statements ++ [{last_list_node, RDF.rest, RDF.nil}], state} )
{first_list_node, statements ++ [{last_list_node, RDF.rest(), RDF.nil()}], state}
end end
end end
@ -195,5 +206,4 @@ defmodule RDF.Turtle.Decoder do
defp local_name_unescape_map(e) when e in @reserved_characters, do: e defp local_name_unescape_map(e) when e in @reserved_characters, do: e
defp local_name_unescape_map(_), do: false defp local_name_unescape_map(_), do: false
end end
View File
@ -29,23 +29,22 @@ defmodule RDF.Turtle.Encoder do
] ]
@ordered_properties MapSet.new(@predicate_order) @ordered_properties MapSet.new(@predicate_order)
@impl RDF.Serialization.Encoder @impl RDF.Serialization.Encoder
@callback encode(Graph.t | Dataset.t, keyword | map) :: {:ok, String.t} | {:error, any} @callback encode(Graph.t() | Dataset.t(), keyword | map) :: {:ok, String.t()} | {:error, any}
def encode(data, opts \\ []) do def encode(data, opts \\ []) do
with base = Keyword.get(opts, :base, Keyword.get(opts, :base_iri)) with base =
|> base_iri(data) |> init_base_iri(), Keyword.get(opts, :base, Keyword.get(opts, :base_iri))
prefixes = Keyword.get(opts, :prefixes) |> base_iri(data)
|> prefixes(data) |> init_prefixes(), |> init_base_iri(),
prefixes = Keyword.get(opts, :prefixes) |> prefixes(data) |> init_prefixes(),
{:ok, state} = State.start_link(data, base, prefixes) do {:ok, state} = State.start_link(data, base, prefixes) do
try do try do
State.preprocess(state) State.preprocess(state)
{:ok, {:ok,
base_directive(base) <> base_directive(base) <>
prefix_directives(prefixes) <> prefix_directives(prefixes) <>
graph_statements(state) graph_statements(state)}
}
after after
State.stop(state) State.stop(state)
end end
@ -60,6 +59,7 @@ defmodule RDF.Turtle.Encoder do
defp init_base_iri(base_iri) do defp init_base_iri(base_iri) do
base_iri = to_string(base_iri) base_iri = to_string(base_iri)
if String.ends_with?(base_iri, ~w[/ #]) do if String.ends_with?(base_iri, ~w[/ #]) do
{:ok, base_iri} {:ok, base_iri}
else else
@ -73,28 +73,26 @@ defmodule RDF.Turtle.Encoder do
defp prefixes(prefixes, _), do: RDF.PrefixMap.new(prefixes) defp prefixes(prefixes, _), do: RDF.PrefixMap.new(prefixes)
defp init_prefixes(prefixes) do defp init_prefixes(prefixes) do
Enum.reduce prefixes, %{}, fn {prefix, iri}, reverse -> Enum.reduce(prefixes, %{}, fn {prefix, iri}, reverse ->
Map.put(reverse, iri, to_string(prefix)) Map.put(reverse, iri, to_string(prefix))
end end)
end end
defp base_directive(nil), do: ""
defp base_directive(nil), do: "" defp base_directive({_, base}), do: "@base <#{base}> .\n"
defp base_directive({_, base}), do: "@base <#{base}> .\n"
defp prefix_directive({ns, prefix}), do: "@prefix #{prefix}: <#{to_string(ns)}> .\n" defp prefix_directive({ns, prefix}), do: "@prefix #{prefix}: <#{to_string(ns)}> .\n"
defp prefix_directives(prefixes) do defp prefix_directives(prefixes) do
case Enum.map(prefixes, &prefix_directive/1) do case Enum.map(prefixes, &prefix_directive/1) do
[] -> "" [] -> ""
prefixes -> Enum.join(prefixes, "") <> "\n" prefixes -> Enum.join(prefixes, "") <> "\n"
end end
end end
defp graph_statements(state) do defp graph_statements(state) do
State.data(state) State.data(state)
|> RDF.Data.descriptions |> RDF.Data.descriptions()
|> order_descriptions(state) |> order_descriptions(state)
|> Enum.map(&description_statements(&1, state)) |> Enum.map(&description_statements(&1, state))
|> Enum.reject(&is_nil/1) |> Enum.reject(&is_nil/1)
@ -103,49 +101,54 @@ defmodule RDF.Turtle.Encoder do
defp order_descriptions(descriptions, state) do defp order_descriptions(descriptions, state) do
base_iri = State.base_iri(state) base_iri = State.base_iri(state)
group = group =
Enum.group_by descriptions, fn Enum.group_by(descriptions, fn
%Description{subject: ^base_iri} -> %Description{subject: ^base_iri} ->
:base :base
description -> description ->
with types when not is_nil(types) <- description.predications[@rdf_type] do with types when not is_nil(types) <- description.predications[@rdf_type] do
Enum.find @top_classes, :other, fn top_class -> Enum.find(@top_classes, :other, fn top_class ->
Map.has_key?(types, top_class) Map.has_key?(types, top_class)
end end)
else else
_ -> :other _ -> :other
end end
end end)
ordered_descriptions = (
@top_classes ordered_descriptions =
|> Stream.map(fn top_class -> group[top_class] end) (@top_classes
|> Stream.reject(&is_nil/1) |> Stream.map(fn top_class -> group[top_class] end)
|> Stream.map(&sort_description_group/1) |> Stream.reject(&is_nil/1)
|> Enum.reduce([], fn class_group, ordered_descriptions -> |> Stream.map(&sort_description_group/1)
ordered_descriptions ++ class_group |> Enum.reduce([], fn class_group, ordered_descriptions ->
end) ordered_descriptions ++ class_group
) ++ (group |> Map.get(:other, []) |> sort_description_group()) end)) ++ (group |> Map.get(:other, []) |> sort_description_group())
case group[:base] do case group[:base] do
[base] -> [base | ordered_descriptions] [base] -> [base | ordered_descriptions]
_ -> ordered_descriptions _ -> ordered_descriptions
end end
end end
defp sort_description_group(descriptions) do defp sort_description_group(descriptions) do
Enum.sort descriptions, fn Enum.sort(descriptions, fn
%Description{subject: %IRI{}}, %Description{subject: %BlankNode{}} -> true %Description{subject: %IRI{}}, %Description{subject: %BlankNode{}} ->
%Description{subject: %BlankNode{}}, %Description{subject: %IRI{}} -> false true
%Description{subject: %BlankNode{}}, %Description{subject: %IRI{}} ->
false
%Description{subject: s1}, %Description{subject: s2} -> %Description{subject: s1}, %Description{subject: s2} ->
to_string(s1) < to_string(s2) to_string(s1) < to_string(s2)
end end)
end end
defp description_statements(description, state, nesting \\ 0) do defp description_statements(description, state, nesting \\ 0) do
with %BlankNode{} <- description.subject, with %BlankNode{} <- description.subject,
ref_count when ref_count < 2 <- ref_count when ref_count < 2 <-
State.bnode_ref_counter(state, description.subject) State.bnode_ref_counter(state, description.subject) do
do
unrefed_bnode_subject_term(description, ref_count, state, nesting) unrefed_bnode_subject_term(description, ref_count, state, nesting)
else else
_ -> full_description_statements(description, state, nesting) _ -> full_description_statements(description, state, nesting)
@ -154,9 +157,7 @@ defmodule RDF.Turtle.Encoder do
defp full_description_statements(subject, description, state, nesting) do defp full_description_statements(subject, description, state, nesting) do
with nesting = nesting + @indentation do with nesting = nesting + @indentation do
subject <> newline_indent(nesting) <> ( subject <> newline_indent(nesting) <> predications(description, state, nesting) <> " .\n"
predications(description, state, nesting)
) <> " .\n"
end end
end end
@ -167,7 +168,8 @@ defmodule RDF.Turtle.Encoder do
defp blank_node_property_list(description, state, nesting) do defp blank_node_property_list(description, state, nesting) do
with indented = nesting + @indentation do with indented = nesting + @indentation do
"[" <> newline_indent(indented) <> "[" <>
newline_indent(indented) <>
predications(description, state, indented) <> predications(description, state, indented) <>
newline_indent(nesting) <> "]" newline_indent(nesting) <> "]"
end end
@ -196,14 +198,14 @@ defmodule RDF.Turtle.Encoder do
end end
defp predication({predicate, objects}, state, nesting) do defp predication({predicate, objects}, state, nesting) do
term(predicate, state, :predicate, nesting) <> " " <> ( term(predicate, state, :predicate, nesting) <>
objects " " <>
(objects
|> Enum.map(fn {object, _} -> term(object, state, :object, nesting) end) |> Enum.map(fn {object, _} -> term(object, state, :object, nesting) end)
|> Enum.join(", ") # TODO: split if the line gets too long # TODO: split if the line gets too long
) |> Enum.join(", "))
end end
defp unrefed_bnode_subject_term(bnode_description, ref_count, state, nesting) do defp unrefed_bnode_subject_term(bnode_description, ref_count, state, nesting) do
if valid_list_node?(bnode_description.subject, state) do if valid_list_node?(bnode_description.subject, state) do
case ref_count do case ref_count do
@ -211,9 +213,14 @@ defmodule RDF.Turtle.Encoder do
bnode_description.subject bnode_description.subject
|> list_term(state, nesting) |> list_term(state, nesting)
|> full_description_statements( |> full_description_statements(
list_subject_description(bnode_description), state, nesting) list_subject_description(bnode_description),
state,
nesting
)
1 -> 1 ->
nil nil
_ -> _ ->
raise "Internal error: This shouldn't happen. Please raise an issue in the RDF.ex project with the input document causing this error." raise "Internal error: This shouldn't happen. Please raise an issue in the RDF.ex project with the input document causing this error."
end end
@ -221,8 +228,10 @@ defmodule RDF.Turtle.Encoder do
case ref_count do case ref_count do
0 -> 0 ->
blank_node_property_list(bnode_description, state, nesting) <> " .\n" blank_node_property_list(bnode_description, state, nesting) <> " .\n"
1 -> 1 ->
nil nil
_ -> _ ->
raise "Internal error: This shouldn't happen. Please raise an issue in the RDF.ex project with the input document causing this error." raise "Internal error: This shouldn't happen. Please raise an issue in the RDF.ex project with the input document causing this error."
end end
@ -231,7 +240,7 @@ defmodule RDF.Turtle.Encoder do
@dialyzer {:nowarn_function, list_subject_description: 1} @dialyzer {:nowarn_function, list_subject_description: 1}
defp list_subject_description(description) do defp list_subject_description(description) do
with description = Description.delete_predicates(description, [RDF.first, RDF.rest]) do with description = Description.delete_predicates(description, [RDF.first(), RDF.rest()]) do
if Enum.count(description.predications) == 0 do if Enum.count(description.predications) == 0 do
# since the Turtle grammar doesn't allow bare lists, we add a statement # since the Turtle grammar doesn't allow bare lists, we add a statement
description |> RDF.type(RDF.List) description |> RDF.type(RDF.List)
@ -256,7 +265,7 @@ defmodule RDF.Turtle.Encoder do
end end
defp valid_list_node?(bnode, state) do defp valid_list_node?(bnode, state) do
MapSet.member?(State.list_nodes(state), bnode) MapSet.member?(State.list_nodes(state), bnode)
end end
defp list_term(head, state, nesting) do defp list_term(head, state, nesting) do
@ -265,9 +274,8 @@ defmodule RDF.Turtle.Encoder do
|> term(state, :list, nesting) |> term(state, :list, nesting)
end end
defp term(@rdf_type, _, :predicate, _), do: "a" defp term(@rdf_type, _, :predicate, _), do: "a"
defp term(@rdf_nil, _, _, _), do: "()" defp term(@rdf_nil, _, _, _), do: "()"
defp term(%IRI{} = iri, state, _, _) do defp term(%IRI{} = iri, state, _, _) do
based_name(iri, State.base(state)) || based_name(iri, State.base(state)) ||
@ -276,7 +284,7 @@ defmodule RDF.Turtle.Encoder do
end end
defp term(%BlankNode{} = bnode, state, position, nesting) defp term(%BlankNode{} = bnode, state, position, nesting)
when position in ~w[object list]a do when position in ~w[object list]a do
if (ref_count = State.bnode_ref_counter(state, bnode)) <= 1 do if (ref_count = State.bnode_ref_counter(state, bnode)) <= 1 do
unrefed_bnode_object_term(bnode, ref_count, state, nesting) unrefed_bnode_object_term(bnode, ref_count, state, nesting)
else else
@ -296,7 +304,7 @@ defmodule RDF.Turtle.Encoder do
end end
defp term(%Literal{literal: %datatype{}} = literal, state, _, nesting) defp term(%Literal{literal: %datatype{}} = literal, state, _, nesting)
when datatype in @native_supported_datatypes do when datatype in @native_supported_datatypes do
if Literal.valid?(literal) do if Literal.valid?(literal) do
Literal.canonical_lexical(literal) Literal.canonical_lexical(literal)
else else
@ -309,15 +317,14 @@ defmodule RDF.Turtle.Encoder do
defp term(list, state, _, nesting) when is_list(list) do defp term(list, state, _, nesting) when is_list(list) do
"(" <> "(" <>
( (list
list |> Enum.map(&term(&1, state, :list, nesting))
|> Enum.map(&term(&1, state, :list, nesting)) |> Enum.join(" ")) <>
|> Enum.join(" ")
) <>
")" ")"
end end
defp based_name(%IRI{} = iri, base), do: based_name(to_string(iri), base) defp based_name(%IRI{} = iri, base), do: based_name(to_string(iri), base)
defp based_name(iri, {:ok, base}) do defp based_name(iri, {:ok, base}) do
if String.starts_with?(iri, base) do if String.starts_with?(iri, base) do
"<#{String.slice(iri, String.length(base)..-1)}>" "<#{String.slice(iri, String.length(base)..-1)}>"
@ -326,22 +333,23 @@ defmodule RDF.Turtle.Encoder do
defp based_name(_, _), do: nil defp based_name(_, _), do: nil
defp typed_literal_term(%Literal{} = literal, state, nesting), defp typed_literal_term(%Literal{} = literal, state, nesting),
do: ~s["#{Literal.lexical(literal)}"^^#{literal |> Literal.datatype_id() |> term(state, :datatype, nesting)}] do:
~s["#{Literal.lexical(literal)}"^^#{
literal |> Literal.datatype_id() |> term(state, :datatype, nesting)
}]
def prefixed_name(iri, prefixes) do def prefixed_name(iri, prefixes) do
with {ns, name} <- split_iri(iri) do with {ns, name} <- split_iri(iri) do
case prefixes[ns] do case prefixes[ns] do
nil -> nil nil -> nil
prefix -> prefix <> ":" <> name prefix -> prefix <> ":" <> name
end end
end end
end end
defp split_iri(%IRI{} = iri), defp split_iri(%IRI{} = iri),
do: iri |> IRI.parse |> split_iri() do: iri |> IRI.parse() |> split_iri()
defp split_iri(%URI{fragment: fragment} = uri) when not is_nil(fragment), defp split_iri(%URI{fragment: fragment} = uri) when not is_nil(fragment),
do: {RDF.iri(%URI{uri | fragment: ""}), fragment} do: {RDF.iri(%URI{uri | fragment: ""}), fragment}
@ -375,7 +383,6 @@ defmodule RDF.Turtle.Encoder do
|> String.replace("\"", ~S[\"]) |> String.replace("\"", ~S[\"])
end end
defp newline_indent(nesting), defp newline_indent(nesting),
do: "\n" <> String.duplicate(@indentation_char, nesting) do: "\n" <> String.duplicate(@indentation_char, nesting)
end end
View File
@ -3,7 +3,6 @@ defmodule RDF.Turtle.Encoder.State do
alias RDF.{BlankNode, Description} alias RDF.{BlankNode, Description}
def start_link(data, base, prefixes) do def start_link(data, base, prefixes) do
Agent.start_link(fn -> %{data: data, base: base, prefixes: prefixes} end) Agent.start_link(fn -> %{data: data, base: base, prefixes: prefixes} end)
end end
@ -12,11 +11,11 @@ defmodule RDF.Turtle.Encoder.State do
Agent.stop(state) Agent.stop(state)
end end
def data(state), do: Agent.get(state, &(&1.data)) def data(state), do: Agent.get(state, & &1.data)
def base(state), do: Agent.get(state, &(&1.base)) def base(state), do: Agent.get(state, & &1.base)
def prefixes(state), do: Agent.get(state, &(&1.prefixes)) def prefixes(state), do: Agent.get(state, & &1.prefixes)
def list_nodes(state), do: Agent.get(state, &(&1.list_nodes)) def list_nodes(state), do: Agent.get(state, & &1.list_nodes)
def bnode_ref_counter(state), do: Agent.get(state, &(&1.bnode_ref_counter)) def bnode_ref_counter(state), do: Agent.get(state, & &1.bnode_ref_counter)
def bnode_ref_counter(state, bnode) do def bnode_ref_counter(state, bnode) do
bnode_ref_counter(state) |> Map.get(bnode, 0) bnode_ref_counter(state) |> Map.get(bnode, 0)
@ -30,13 +29,12 @@ defmodule RDF.Turtle.Encoder.State do
end end
end end
def list_values(head, state), do: Agent.get(state, &(&1.list_values[head])) def list_values(head, state), do: Agent.get(state, & &1.list_values[head])
def preprocess(state) do def preprocess(state) do
with data = data(state), with data = data(state),
{bnode_ref_counter, list_parents} = bnode_info(data), {bnode_ref_counter, list_parents} = bnode_info(data),
{list_nodes, list_values} = valid_lists(list_parents, bnode_ref_counter, data) {list_nodes, list_values} = valid_lists(list_parents, bnode_ref_counter, data) do
do
Agent.update(state, &Map.put(&1, :bnode_ref_counter, bnode_ref_counter)) Agent.update(state, &Map.put(&1, :bnode_ref_counter, bnode_ref_counter))
Agent.update(state, &Map.put(&1, :list_nodes, list_nodes)) Agent.update(state, &Map.put(&1, :list_nodes, list_nodes))
Agent.update(state, &Map.put(&1, :list_values, list_values)) Agent.update(state, &Map.put(&1, :list_values, list_values))
@ -45,45 +43,50 @@ defmodule RDF.Turtle.Encoder.State do
defp bnode_info(data) do defp bnode_info(data) do
data data
|> RDF.Data.descriptions |> RDF.Data.descriptions()
|> Enum.reduce({%{}, %{}}, |> Enum.reduce(
fn %Description{subject: subject} = description, {%{}, %{}},
{bnode_ref_counter, list_parents} -> fn %Description{subject: subject} = description, {bnode_ref_counter, list_parents} ->
list_parents =
if match?(%BlankNode{}, subject) and
to_list?(description, Map.get(bnode_ref_counter, subject, 0)),
do: Map.put_new(list_parents, subject, nil),
else: list_parents
list_parents = Enum.reduce(description.predications, {bnode_ref_counter, list_parents}, fn
if match?(%BlankNode{}, subject) and {predicate, objects}, {bnode_ref_counter, list_parents} ->
to_list?(description, Map.get(bnode_ref_counter, subject, 0)), Enum.reduce(Map.keys(objects), {bnode_ref_counter, list_parents}, fn
do: Map.put_new(list_parents, subject, nil), %BlankNode{} = object, {bnode_ref_counter, list_parents} ->
else: list_parents {
# Note: The following conditional produces imprecise results
# (sometimes the occurrence in the subject counts, sometimes it doesn't),
# but is sufficient for the current purpose of handling the
# case of a statement with the same subject and object bnode.
Map.update(
bnode_ref_counter,
object,
if(subject == object, do: 2, else: 1),
&(&1 + 1)
),
if predicate == RDF.rest() do
Map.put_new(list_parents, object, subject)
else
list_parents
end
}
Enum.reduce(description.predications, {bnode_ref_counter, list_parents}, fn _, {bnode_ref_counter, list_parents} ->
({predicate, objects}, {bnode_ref_counter, list_parents}) -> {bnode_ref_counter, list_parents}
Enum.reduce(Map.keys(objects), {bnode_ref_counter, list_parents}, fn end)
(%BlankNode{} = object, {bnode_ref_counter, list_parents}) -> end)
{ end
# Note: The following conditional produces imprecise results )
# (sometimes the occurrence in the subject counts, sometimes it doesn't),
# but is sufficient for the current purpose of handling the
# case of a statement with the same subject and object bnode.
Map.update(bnode_ref_counter, object,
(if subject == object, do: 2, else: 1), &(&1 + 1)),
if predicate == RDF.rest do
Map.put_new(list_parents, object, subject)
else
list_parents
end
}
(_, {bnode_ref_counter, list_parents}) ->
{bnode_ref_counter, list_parents}
end)
end)
end)
end end
@list_properties MapSet.new([ @list_properties MapSet.new([
RDF.Utils.Bootstrapping.rdf_iri("first"), RDF.Utils.Bootstrapping.rdf_iri("first"),
RDF.Utils.Bootstrapping.rdf_iri("rest") RDF.Utils.Bootstrapping.rdf_iri("rest")
]) ])
@dialyzer {:nowarn_function, to_list?: 2} @dialyzer {:nowarn_function, to_list?: 2}
defp to_list?(%Description{} = description, 1) do defp to_list?(%Description{} = description, 1) do
@ -97,39 +100,37 @@ defmodule RDF.Turtle.Encoder.State do
defp to_list?(_, _), defp to_list?(_, _),
do: false do: false
defp valid_lists(list_parents, bnode_ref_counter, data) do defp valid_lists(list_parents, bnode_ref_counter, data) do
head_nodes = for {list_node, nil} <- list_parents, do: list_node head_nodes = for {list_node, nil} <- list_parents, do: list_node
all_list_nodes = MapSet.new( all_list_nodes =
for {list_node, _} <- list_parents, Map.get(bnode_ref_counter, list_node, 0) < 2 do MapSet.new(
list_node for {list_node, _} <- list_parents, Map.get(bnode_ref_counter, list_node, 0) < 2 do
end) list_node
Enum.reduce head_nodes, {MapSet.new, %{}},
fn head_node, {valid_list_nodes, list_values} ->
with list when not is_nil(list) <-
RDF.List.new(head_node, data),
list_nodes =
RDF.List.nodes(list),
true <-
Enum.all?(list_nodes, fn
%BlankNode{} = list_node ->
MapSet.member?(all_list_nodes, list_node)
_ ->
false
end)
do
{
Enum.reduce(list_nodes, valid_list_nodes, fn list_node, valid_list_nodes ->
MapSet.put(valid_list_nodes, list_node)
end),
Map.put(list_values, head_node, RDF.List.values(list)),
}
else
_ -> {valid_list_nodes, list_values}
end end
end )
end
Enum.reduce(head_nodes, {MapSet.new(), %{}}, fn head_node, {valid_list_nodes, list_values} ->
with list when not is_nil(list) <-
RDF.List.new(head_node, data),
list_nodes = RDF.List.nodes(list),
true <-
Enum.all?(list_nodes, fn
%BlankNode{} = list_node ->
MapSet.member?(all_list_nodes, list_node)
_ ->
false
end) do
{
Enum.reduce(list_nodes, valid_list_nodes, fn list_node, valid_list_nodes ->
MapSet.put(valid_list_nodes, list_node)
end),
Map.put(list_values, head_node, RDF.List.values(list))
}
else
_ -> {valid_list_nodes, list_values}
end
end)
end
end end
View File
@ -3,7 +3,6 @@ defmodule RDF.Sigils do
Sigils for the most common types of RDF nodes. Sigils for the most common types of RDF nodes.
""" """
@doc ~S""" @doc ~S"""
Handles the sigil `~I` for IRIs. Handles the sigil `~I` for IRIs.
@ -34,7 +33,6 @@ defmodule RDF.Sigils do
Macro.escape(RDF.BlankNode.new(bnode)) Macro.escape(RDF.BlankNode.new(bnode))
end end
@doc ~S""" @doc ~S"""
Handles the sigil `~L` for plain Literals. Handles the sigil `~L` for plain Literals.
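The sigils documented here give concise constructors for the node types used throughout this commit; a quick sketch (the ~B handler's doc is truncated in this hunk, but its implementation above builds an RDF.BlankNode):

import RDF.Sigils

~I<http://example.com/S>   # an RDF.IRI
~B<foo>                    # an RDF.BlankNode
~L"foo"                    # a plain RDF.Literal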
View File
@ -8,21 +8,20 @@ defmodule RDF.Statement do
alias RDF.{BlankNode, IRI, Literal, Quad, Term, Triple} alias RDF.{BlankNode, IRI, Literal, Quad, Term, Triple}
import RDF.Guards import RDF.Guards
@type subject :: IRI.t | BlankNode.t @type subject :: IRI.t() | BlankNode.t()
@type predicate :: IRI.t | BlankNode.t @type predicate :: IRI.t() | BlankNode.t()
@type object :: IRI.t | BlankNode.t | Literal.t @type object :: IRI.t() | BlankNode.t() | Literal.t()
@type graph_name :: IRI.t | BlankNode.t @type graph_name :: IRI.t() | BlankNode.t()
@type coercible_subject :: subject | atom | String.t @type coercible_subject :: subject | atom | String.t()
@type coercible_predicate :: predicate | atom | String.t @type coercible_predicate :: predicate | atom | String.t()
@type coercible_object :: object | any @type coercible_object :: object | any
@type coercible_graph_name :: graph_name | atom | String.t @type coercible_graph_name :: graph_name | atom | String.t()
@type qualified_term :: {atom, Term.t | nil} @type qualified_term :: {atom, Term.t() | nil}
@type term_mapping :: (qualified_term -> any | nil) @type term_mapping :: (qualified_term -> any | nil)
@type t :: Triple.t | Quad.t
@type t :: Triple.t() | Quad.t()
@doc """ @doc """
Creates a `RDF.Statement` tuple with proper RDF values. Creates a `RDF.Statement` tuple with proper RDF values.
@ -36,10 +35,10 @@ defmodule RDF.Statement do
iex> RDF.Statement.coerce {"http://example.com/S", "http://example.com/p", 42, "http://example.com/Graph"} iex> RDF.Statement.coerce {"http://example.com/S", "http://example.com/p", 42, "http://example.com/Graph"}
{~I<http://example.com/S>, ~I<http://example.com/p>, RDF.literal(42), ~I<http://example.com/Graph>} {~I<http://example.com/S>, ~I<http://example.com/p>, RDF.literal(42), ~I<http://example.com/Graph>}
""" """
@spec coerce(Triple.coercible_t) :: Triple.t @spec coerce(Triple.coercible_t()) :: Triple.t()
@spec coerce(Quad.coercible_t) :: Quad.t @spec coerce(Quad.coercible_t()) :: Quad.t()
def coerce(statement) def coerce(statement)
def coerce({_, _, _} = triple), do: Triple.new(triple) def coerce({_, _, _} = triple), do: Triple.new(triple)
def coerce({_, _, _, _} = quad), do: Quad.new(quad) def coerce({_, _, _, _} = quad), do: Quad.new(quad)
@doc false @doc false
@ -49,7 +48,7 @@ defmodule RDF.Statement do
def coerce_subject(bnode = %BlankNode{}), do: bnode def coerce_subject(bnode = %BlankNode{}), do: bnode
def coerce_subject("_:" <> identifier), do: RDF.bnode(identifier) def coerce_subject("_:" <> identifier), do: RDF.bnode(identifier)
def coerce_subject(iri) when maybe_ns_term(iri) or is_binary(iri), do: RDF.iri!(iri) def coerce_subject(iri) when maybe_ns_term(iri) or is_binary(iri), do: RDF.iri!(iri)
def coerce_subject(arg), do: raise RDF.Triple.InvalidSubjectError, subject: arg def coerce_subject(arg), do: raise(RDF.Triple.InvalidSubjectError, subject: arg)
@doc false @doc false
@spec coerce_predicate(coercible_predicate) :: predicate @spec coerce_predicate(coercible_predicate) :: predicate
@ -60,7 +59,7 @@ defmodule RDF.Statement do
# TODO: Support an option `:strict_rdf` to explicitly disallow them or produce warnings or ... # TODO: Support an option `:strict_rdf` to explicitly disallow them or produce warnings or ...
def coerce_predicate(bnode = %BlankNode{}), do: bnode def coerce_predicate(bnode = %BlankNode{}), do: bnode
def coerce_predicate(iri) when maybe_ns_term(iri) or is_binary(iri), do: RDF.iri!(iri) def coerce_predicate(iri) when maybe_ns_term(iri) or is_binary(iri), do: RDF.iri!(iri)
def coerce_predicate(arg), do: raise RDF.Triple.InvalidPredicateError, predicate: arg def coerce_predicate(arg), do: raise(RDF.Triple.InvalidPredicateError, predicate: arg)
@doc false @doc false
@spec coerce_object(coercible_object) :: object @spec coerce_object(coercible_object) :: object
@ -80,9 +79,9 @@ defmodule RDF.Statement do
def coerce_graph_name(bnode = %BlankNode{}), do: bnode def coerce_graph_name(bnode = %BlankNode{}), do: bnode
def coerce_graph_name("_:" <> identifier), do: RDF.bnode(identifier) def coerce_graph_name("_:" <> identifier), do: RDF.bnode(identifier)
def coerce_graph_name(iri) when maybe_ns_term(iri) or is_binary(iri), do: RDF.iri!(iri) def coerce_graph_name(iri) when maybe_ns_term(iri) or is_binary(iri), do: RDF.iri!(iri)
def coerce_graph_name(arg),
do: raise RDF.Quad.InvalidGraphContextError, graph_context: arg
def coerce_graph_name(arg),
do: raise(RDF.Quad.InvalidGraphContextError, graph_context: arg)
@doc """ @doc """
Returns a tuple of native Elixir values from a `RDF.Statement` of RDF terms. Returns a tuple of native Elixir values from a `RDF.Statement` of RDF terms.
@ -117,9 +116,9 @@ defmodule RDF.Statement do
{"S", :p, 42, ~I<http://example.com/Graph>} {"S", :p, 42, ~I<http://example.com/Graph>}
""" """
@spec values(t | any, term_mapping) :: Triple.t_values | Quad.t_values | nil @spec values(t | any, term_mapping) :: Triple.t_values() | Quad.t_values() | nil
def values(statement, mapping \\ &default_term_mapping/1) def values(statement, mapping \\ &default_term_mapping/1)
def values({_, _, _} = triple, mapping), do: RDF.Triple.values(triple, mapping) def values({_, _, _} = triple, mapping), do: RDF.Triple.values(triple, mapping)
def values({_, _, _, _} = quad, mapping), do: RDF.Quad.values(quad, mapping) def values({_, _, _, _} = quad, mapping), do: RDF.Quad.values(quad, mapping)
def values(_, _), do: nil def values(_, _), do: nil
@ -129,7 +128,6 @@ defmodule RDF.Statement do
def default_term_mapping({:graph_name, nil}), do: nil def default_term_mapping({:graph_name, nil}), do: nil
def default_term_mapping({_, term}), do: RDF.Term.value(term) def default_term_mapping({_, term}), do: RDF.Term.value(term)
@doc """ @doc """
Checks if the given tuple is a valid RDF statement, i.e. RDF triple or quad. Checks if the given tuple is a valid RDF statement, i.e. RDF triple or quad.
@ -137,7 +135,7 @@ defmodule RDF.Statement do
position only IRIs and blank nodes are allowed, while on the predicate and graph position only IRIs and blank nodes are allowed, while on the predicate and graph
context position only IRIs are allowed. The object position can be any RDF term. context position only IRIs are allowed. The object position can be any RDF term.
""" """
@spec valid?(Triple.t | Quad.t | any) :: boolean @spec valid?(Triple.t() | Quad.t() | any) :: boolean
def valid?(tuple) def valid?(tuple)
def valid?({subject, predicate, object}) do def valid?({subject, predicate, object}) do
@ -152,22 +150,21 @@ defmodule RDF.Statement do
def valid?(_), do: false def valid?(_), do: false
@spec valid_subject?(subject | any) :: boolean @spec valid_subject?(subject | any) :: boolean
def valid_subject?(%IRI{}), do: true def valid_subject?(%IRI{}), do: true
def valid_subject?(%BlankNode{}), do: true def valid_subject?(%BlankNode{}), do: true
def valid_subject?(_), do: false def valid_subject?(_), do: false
@spec valid_predicate?(predicate | any) :: boolean @spec valid_predicate?(predicate | any) :: boolean
def valid_predicate?(%IRI{}), do: true def valid_predicate?(%IRI{}), do: true
def valid_predicate?(_), do: false def valid_predicate?(_), do: false
@spec valid_object?(object | any) :: boolean @spec valid_object?(object | any) :: boolean
def valid_object?(%IRI{}), do: true def valid_object?(%IRI{}), do: true
def valid_object?(%BlankNode{}), do: true def valid_object?(%BlankNode{}), do: true
def valid_object?(%Literal{}), do: true def valid_object?(%Literal{}), do: true
def valid_object?(_), do: false def valid_object?(_), do: false
@spec valid_graph_name?(graph_name | any) :: boolean @spec valid_graph_name?(graph_name | any) :: boolean
def valid_graph_name?(%IRI{}), do: true def valid_graph_name?(%IRI{}), do: true
def valid_graph_name?(_), do: false def valid_graph_name?(_), do: false
end end
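A brief sketch of the coercion and mapping functions reformatted above; with the default term mapping the values should come back as native Elixir values:

iex> RDF.Statement.valid?({~I<http://example.com/S>, ~I<http://example.com/p>, RDF.literal(42)})
true
iex> RDF.Statement.values({~I<http://example.com/S>, ~I<http://example.com/p>, RDF.literal(42)})
{"http://example.com/S", "http://example.com/p", 42}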

View File

@ -11,8 +11,7 @@ defprotocol RDF.Term do
see <https://www.w3.org/TR/sparql11-query/#defn_RDFTerm> see <https://www.w3.org/TR/sparql11-query/#defn_RDFTerm>
""" """
@type t :: RDF.IRI.t | RDF.BlankNode.t | RDF.Literal.t @type t :: RDF.IRI.t() | RDF.BlankNode.t() | RDF.Literal.t()
@doc """ @doc """
Checks if the given value is a RDF term. Checks if the given value is a RDF term.
@ -43,7 +42,6 @@ defprotocol RDF.Term do
@fallback_to_any true @fallback_to_any true
def equal?(term1, term2) def equal?(term1, term2)
@doc """ @doc """
Tests for equality of values. Tests for equality of values.
@ -89,52 +87,52 @@ defprotocol RDF.Term do
""" """
def value(term) def value(term)
end end
defimpl RDF.Term, for: RDF.IRI do defimpl RDF.Term, for: RDF.IRI do
def equal?(term1, term2), do: term1 == term2 def equal?(term1, term2), do: term1 == term2
def equal_value?(term1, term2), do: RDF.IRI.equal_value?(term1, term2) def equal_value?(term1, term2), do: RDF.IRI.equal_value?(term1, term2)
def coerce(term), do: term def coerce(term), do: term
def value(term), do: term.value def value(term), do: term.value
def term?(_), do: true def term?(_), do: true
end end
defimpl RDF.Term, for: RDF.BlankNode do defimpl RDF.Term, for: RDF.BlankNode do
def equal?(term1, term2), do: term1 == term2 def equal?(term1, term2), do: term1 == term2
def equal_value?(term1, term2), do: RDF.BlankNode.equal_value?(term1, term2) def equal_value?(term1, term2), do: RDF.BlankNode.equal_value?(term1, term2)
def coerce(term), do: term def coerce(term), do: term
def value(term), do: to_string(term) def value(term), do: to_string(term)
def term?(_), do: true def term?(_), do: true
end end
defimpl RDF.Term, for: Reference do defimpl RDF.Term, for: Reference do
@dialyzer {:nowarn_function, equal_value?: 2} @dialyzer {:nowarn_function, equal_value?: 2}
@dialyzer {:nowarn_function, coerce: 1} @dialyzer {:nowarn_function, coerce: 1}
def equal?(term1, term2), do: term1 == term2 def equal?(term1, term2), do: term1 == term2
def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2) def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2)
def coerce(term), do: RDF.BlankNode.new(term) def coerce(term), do: RDF.BlankNode.new(term)
def value(term), do: term def value(term), do: term
def term?(_), do: false def term?(_), do: false
end end
defimpl RDF.Term, for: RDF.Literal do defimpl RDF.Term, for: RDF.Literal do
def equal?(term1, term2), do: RDF.Literal.equal?(term1, term2) def equal?(term1, term2), do: RDF.Literal.equal?(term1, term2)
def equal_value?(term1, term2), do: RDF.Literal.equal_value?(term1, term2) def equal_value?(term1, term2), do: RDF.Literal.equal_value?(term1, term2)
def coerce(term), do: term def coerce(term), do: term
def value(term), do: RDF.Literal.value(term) || RDF.Literal.lexical(term) def value(term), do: RDF.Literal.value(term) || RDF.Literal.lexical(term)
def term?(_), do: true def term?(_), do: true
end end
defimpl RDF.Term, for: Atom do defimpl RDF.Term, for: Atom do
def equal?(term1, term2), do: term1 == term2 def equal?(term1, term2), do: term1 == term2
def equal_value?(nil, _), do: nil def equal_value?(nil, _), do: nil
def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2) def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2)
def coerce(true), do: RDF.XSD.true def coerce(true), do: RDF.XSD.true()
def coerce(false), do: RDF.XSD.false def coerce(false), do: RDF.XSD.false()
def coerce(nil), do: nil def coerce(nil), do: nil
def coerce(term) do def coerce(term) do
case RDF.Namespace.resolve_term(term) do case RDF.Namespace.resolve_term(term) do
{:ok, iri} -> iri {:ok, iri} -> iri
@ -142,90 +140,90 @@ defimpl RDF.Term, for: Atom do
end end
end end
def value(true), do: true def value(true), do: true
def value(false), do: false def value(false), do: false
def value(nil), do: nil def value(nil), do: nil
def value(term), do: RDF.Term.value(coerce(term)) def value(term), do: RDF.Term.value(coerce(term))
def term?(_), do: false def term?(_), do: false
end end
defimpl RDF.Term, for: BitString do defimpl RDF.Term, for: BitString do
def equal?(term1, term2), do: term1 == term2 def equal?(term1, term2), do: term1 == term2
def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2) def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2)
def coerce(term), do: RDF.XSD.String.new(term) def coerce(term), do: RDF.XSD.String.new(term)
def value(term), do: term def value(term), do: term
def term?(_), do: false def term?(_), do: false
end end
defimpl RDF.Term, for: Integer do defimpl RDF.Term, for: Integer do
def equal?(term1, term2), do: term1 == term2 def equal?(term1, term2), do: term1 == term2
def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2) def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2)
def coerce(term), do: RDF.XSD.Integer.new(term) def coerce(term), do: RDF.XSD.Integer.new(term)
def value(term), do: term def value(term), do: term
def term?(_), do: false def term?(_), do: false
end end
defimpl RDF.Term, for: Float do defimpl RDF.Term, for: Float do
def equal?(term1, term2), do: term1 == term2 def equal?(term1, term2), do: term1 == term2
def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2) def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2)
def coerce(term), do: RDF.XSD.Double.new(term) def coerce(term), do: RDF.XSD.Double.new(term)
def value(term), do: term def value(term), do: term
def term?(_), do: false def term?(_), do: false
end end
defimpl RDF.Term, for: Decimal do defimpl RDF.Term, for: Decimal do
def equal?(term1, term2), do: term1 == term2 def equal?(term1, term2), do: term1 == term2
def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2) def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2)
def coerce(term), do: RDF.XSD.Decimal.new(term) def coerce(term), do: RDF.XSD.Decimal.new(term)
def value(term), do: term def value(term), do: term
def term?(_), do: false def term?(_), do: false
end end
defimpl RDF.Term, for: DateTime do defimpl RDF.Term, for: DateTime do
def equal?(term1, term2), do: term1 == term2 def equal?(term1, term2), do: term1 == term2
def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2) def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2)
def coerce(term), do: RDF.XSD.DateTime.new(term) def coerce(term), do: RDF.XSD.DateTime.new(term)
def value(term), do: term def value(term), do: term
def term?(_), do: false def term?(_), do: false
end end
defimpl RDF.Term, for: NaiveDateTime do defimpl RDF.Term, for: NaiveDateTime do
def equal?(term1, term2), do: term1 == term2 def equal?(term1, term2), do: term1 == term2
def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2) def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2)
def coerce(term), do: RDF.XSD.DateTime.new(term) def coerce(term), do: RDF.XSD.DateTime.new(term)
def value(term), do: term def value(term), do: term
def term?(_), do: false def term?(_), do: false
end end
defimpl RDF.Term, for: Date do defimpl RDF.Term, for: Date do
def equal?(term1, term2), do: term1 == term2 def equal?(term1, term2), do: term1 == term2
def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2) def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2)
def coerce(term), do: RDF.XSD.Date.new(term) def coerce(term), do: RDF.XSD.Date.new(term)
def value(term), do: term def value(term), do: term
def term?(_), do: false def term?(_), do: false
end end
defimpl RDF.Term, for: Time do defimpl RDF.Term, for: Time do
def equal?(term1, term2), do: term1 == term2 def equal?(term1, term2), do: term1 == term2
def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2) def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2)
def coerce(term), do: RDF.XSD.Time.new(term) def coerce(term), do: RDF.XSD.Time.new(term)
def value(term), do: term def value(term), do: term
def term?(_), do: false def term?(_), do: false
end end
defimpl RDF.Term, for: URI do defimpl RDF.Term, for: URI do
def equal?(term1, term2), do: term1 == term2 def equal?(term1, term2), do: term1 == term2
def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2) def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2)
def coerce(term), do: RDF.XSD.AnyURI.new(term) def coerce(term), do: RDF.XSD.AnyURI.new(term)
def value(term), do: term def value(term), do: term
def term?(_), do: false def term?(_), do: false
end end
defimpl RDF.Term, for: Any do defimpl RDF.Term, for: Any do
def equal?(term1, term2), do: term1 == term2 def equal?(term1, term2), do: term1 == term2
def equal_value?(_, _), do: nil def equal_value?(_, _), do: nil
def coerce(_), do: nil def coerce(_), do: nil
def value(_), do: nil def value(_), do: nil
def term?(_), do: false def term?(_), do: false
end end
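A small sketch of the protocol implementations above in action (results assume the XSD datatype constructors they delegate to):

iex> RDF.Term.coerce("foo")    # BitString -> RDF.XSD.String literal
iex> RDF.Term.coerce(42)       # Integer   -> RDF.XSD.Integer literal
iex> RDF.Term.value(~I<http://example.com/S>)
"http://example.com/S"
iex> RDF.Term.term?(42)        # native values are coercible, but are not terms themselves
false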

View File

@ -8,14 +8,13 @@ defmodule RDF.Triple do
alias RDF.Statement alias RDF.Statement
@type t :: {Statement.subject, Statement.predicate, Statement.object} @type t :: {Statement.subject(), Statement.predicate(), Statement.object()}
@type coercible_t :: @type coercible_t ::
{Statement.coercible_subject, Statement.coercible_predicate, {Statement.coercible_subject(), Statement.coercible_predicate(),
Statement.coercible_object} Statement.coercible_object()}
@type t_values :: {String.t, String.t, any}
@type t_values :: {String.t(), String.t(), any}
@doc """ @doc """
Creates a `RDF.Triple` with proper RDF values. Creates a `RDF.Triple` with proper RDF values.
@ -32,9 +31,9 @@ defmodule RDF.Triple do
{RDF.iri("http://example.com/S"), RDF.iri("http://example.com/p"), RDF.literal(42)} {RDF.iri("http://example.com/S"), RDF.iri("http://example.com/p"), RDF.literal(42)}
""" """
@spec new( @spec new(
Statement.coercible_subject, Statement.coercible_subject(),
Statement.coercible_predicate, Statement.coercible_predicate(),
Statement.coercible_object Statement.coercible_object()
) :: t ) :: t
def new(subject, predicate, object) do def new(subject, predicate, object) do
{ {
@ -61,7 +60,6 @@ defmodule RDF.Triple do
@spec new(coercible_t) :: t @spec new(coercible_t) :: t
def new({subject, predicate, object}), do: new(subject, predicate, object) def new({subject, predicate, object}), do: new(subject, predicate, object)
@doc """ @doc """
Returns a tuple of native Elixir values from a `RDF.Triple` of RDF terms. Returns a tuple of native Elixir values from a `RDF.Triple` of RDF terms.
@ -87,14 +85,13 @@ defmodule RDF.Triple do
{"S", "p", 42} {"S", "p", 42}
""" """
@spec values(t | any, Statement.term_mapping) :: t_values | nil @spec values(t | any, Statement.term_mapping()) :: t_values | nil
def values(triple, mapping \\ &Statement.default_term_mapping/1) def values(triple, mapping \\ &Statement.default_term_mapping/1)
def values({subject, predicate, object}, mapping) do def values({subject, predicate, object}, mapping) do
with subject_value when not is_nil(subject_value) <- mapping.({:subject, subject}), with subject_value when not is_nil(subject_value) <- mapping.({:subject, subject}),
predicate_value when not is_nil(predicate_value) <- mapping.({:predicate, predicate}), predicate_value when not is_nil(predicate_value) <- mapping.({:predicate, predicate}),
object_value when not is_nil(object_value) <- mapping.({:object, object}) object_value when not is_nil(object_value) <- mapping.({:object, object}) do
do
{subject_value, predicate_value, object_value} {subject_value, predicate_value, object_value}
else else
_ -> nil _ -> nil
@ -103,7 +100,6 @@ defmodule RDF.Triple do
def values(_, _), do: nil def values(_, _), do: nil
@doc """ @doc """
Checks if the given tuple is a valid RDF triple. Checks if the given tuple is a valid RDF triple.
@ -115,5 +111,4 @@ defmodule RDF.Triple do
def valid?(tuple) def valid?(tuple)
def valid?({_, _, _} = triple), do: Statement.valid?(triple) def valid?({_, _, _} = triple), do: Statement.valid?(triple)
def valid?(_), do: false def valid?(_), do: false
end end
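For completeness, the tuple variant of `new/1` and `valid?/1` shown above can be exercised like this:

iex> triple = RDF.Triple.new({"http://example.com/S", "http://example.com/p", 42})
{RDF.iri("http://example.com/S"), RDF.iri("http://example.com/p"), RDF.literal(42)}
iex> RDF.Triple.valid?(triple)
true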

View File

@ -1,7 +1,7 @@
defmodule RDF.Utils.Bootstrapping do defmodule RDF.Utils.Bootstrapping do
@moduledoc !""" @moduledoc !"""
This module holds functions to circumvent circular dependency problems. This module holds functions to circumvent circular dependency problems.
""" """
@xsd_base_iri "http://www.w3.org/2001/XMLSchema#" @xsd_base_iri "http://www.w3.org/2001/XMLSchema#"
@rdf_base_iri "http://www.w3.org/1999/02/22-rdf-syntax-ns#" @rdf_base_iri "http://www.w3.org/1999/02/22-rdf-syntax-ns#"

View File

@ -1,6 +1,6 @@
defmodule RDF.Utils.Guards do defmodule RDF.Utils.Guards do
defguard is_ordinary_atom(term) defguard is_ordinary_atom(term)
when is_atom(term) and term not in [nil, true, false] when is_atom(term) and term not in [nil, true, false]
defguard maybe_module(term) when is_ordinary_atom(term) defguard maybe_module(term) when is_ordinary_atom(term)
end end

View File

@ -13,35 +13,41 @@ defmodule RDF.Utils.ResourceClassifier do
def property?(resource, data) do def property?(resource, data) do
with %Description{} = description <- RDF.Data.description(data, resource) do with %Description{} = description <- RDF.Data.description(data, resource) do
property_by_domain?(description) or property_by_domain?(description) or
property_by_rdf_type?(Description.get(description, @rdf_type)) property_by_rdf_type?(Description.get(description, @rdf_type))
end end
# || property_by_predicate_usage?(resource, data)
# || property_by_predicate_usage?(resource, data)
end end
@property_properties (Enum.map(
@property_properties Enum.map(~w[ ~w[
domain domain
range range
subPropertyOf subPropertyOf
], &rdfs_iri/1) ++ ],
Enum.map(~w[ &rdfs_iri/1
) ++
Enum.map(
~w[
equivalentProperty equivalentProperty
inverseOf inverseOf
propertyDisjointWith propertyDisjointWith
], &owl_iri/1) ],
|> MapSet.new &owl_iri/1
))
|> MapSet.new()
defp property_by_domain?(description) do defp property_by_domain?(description) do
Enum.any? @property_properties, fn property -> Enum.any?(@property_properties, fn property ->
description[property] description[property]
end end)
end end
@property_classes [ @property_classes [
rdf_iri("Property"), rdf_iri("Property"),
rdfs_iri("ContainerMembershipProperty") rdfs_iri("ContainerMembershipProperty")
| | Enum.map(
Enum.map(~w[ ~w[
ObjectProperty ObjectProperty
DatatypeProperty DatatypeProperty
AnnotationProperty AnnotationProperty
@ -53,23 +59,22 @@ defmodule RDF.Utils.ResourceClassifier do
IrreflexiveProperty IrreflexiveProperty
TransitiveProperty TransitiveProperty
DeprecatedProperty DeprecatedProperty
], &owl_iri/1) ],
&owl_iri/1
)
] ]
|> MapSet.new |> MapSet.new()
@dialyzer {:nowarn_function, property_by_rdf_type?: 1} @dialyzer {:nowarn_function, property_by_rdf_type?: 1}
defp property_by_rdf_type?(nil), do: nil defp property_by_rdf_type?(nil), do: nil
defp property_by_rdf_type?(types) do defp property_by_rdf_type?(types) do
not ( not (types
types |> MapSet.new()
|> MapSet.new |> MapSet.disjoint?(@property_classes))
|> MapSet.disjoint?(@property_classes)
)
end end
# defp property_by_predicate_usage?(resource, data) do
# defp property_by_predicate_usage?(resource, data) do # resource in Graph.predicates(data) || nil
# resource in Graph.predicates(data) || nil # end
# end
end end
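A hedged sketch of the classifier in use: a resource described with `rdfs:domain` should be classified as a property (the example.com IRIs below are made up for illustration):

iex> graph = RDF.Graph.new([{RDF.iri("http://example.com/knows"), RDF.NS.RDFS.domain(), RDF.iri("http://example.com/Person")}])
iex> RDF.Utils.ResourceClassifier.property?(RDF.iri("http://example.com/knows"), graph)
true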

View File

@ -23,13 +23,14 @@ defmodule RDF.Vocabulary.Namespace do
Defines a `RDF.Namespace` module for a RDF vocabulary. Defines a `RDF.Namespace` module for a RDF vocabulary.
""" """
defmacro defvocab(name, opts) do defmacro defvocab(name, opts) do
strict = strict?(opts) strict = strict?(opts)
base_iri = base_iri!(opts) base_iri = base_iri!(opts)
file = filename!(opts) file = filename!(opts)
{terms, data} = {terms, data} =
case source!(opts) do case source!(opts) do
{:terms, terms} -> {terms, nil} {:terms, terms} -> {terms, nil}
{:data, data} -> {rdf_data_vocab_terms(data, base_iri), data} {:data, data} -> {rdf_data_vocab_terms(data, base_iri), data}
end end
unless Mix.env() == :test do unless Mix.env() == :test do
@ -37,6 +38,7 @@ defmodule RDF.Vocabulary.Namespace do
end end
ignored_terms = ignored_terms!(opts) ignored_terms = ignored_terms!(opts)
terms = terms =
terms terms
|> term_mapping!(opts) |> term_mapping!(opts)
@ -44,8 +46,9 @@ defmodule RDF.Vocabulary.Namespace do
|> validate_terms! |> validate_terms!
|> validate_characters!(opts) |> validate_characters!(opts)
|> validate_case!(data, base_iri, opts) |> validate_case!(data, base_iri, opts)
case_separated_terms = group_terms_by_case(terms) case_separated_terms = group_terms_by_case(terms)
lowercased_terms = Map.get(case_separated_terms, :lowercased, %{}) lowercased_terms = Map.get(case_separated_terms, :lowercased, %{})
quote do quote do
vocabdoc = Module.delete_attribute(__MODULE__, :vocabdoc) vocabdoc = Module.delete_attribute(__MODULE__, :vocabdoc)
@ -60,7 +63,7 @@ defmodule RDF.Vocabulary.Namespace do
end end
@base_iri unquote(base_iri) @base_iri unquote(base_iri)
@spec __base_iri__ :: String.t @spec __base_iri__ :: String.t()
def __base_iri__, do: @base_iri def __base_iri__, do: @base_iri
@strict unquote(strict) @strict unquote(strict)
@ -69,24 +72,24 @@ defmodule RDF.Vocabulary.Namespace do
@terms unquote(Macro.escape(terms)) @terms unquote(Macro.escape(terms))
@impl Elixir.RDF.Namespace @impl Elixir.RDF.Namespace
def __terms__, do: @terms |> Map.keys def __terms__, do: @terms |> Map.keys()
@ignored_terms unquote(Macro.escape(ignored_terms)) @ignored_terms unquote(Macro.escape(ignored_terms))
@doc """ @doc """
Returns all known IRIs of the vocabulary. Returns all known IRIs of the vocabulary.
""" """
@spec __iris__ :: [Elixir.RDF.IRI.t] @spec __iris__ :: [Elixir.RDF.IRI.t()]
def __iris__ do def __iris__ do
@terms @terms
|> Enum.map(fn |> Enum.map(fn
{term, true} -> term_to_iri(@base_iri, term) {term, true} -> term_to_iri(@base_iri, term)
{_alias, term} -> term_to_iri(@base_iri, term) {_alias, term} -> term_to_iri(@base_iri, term)
end) end)
|> Enum.uniq |> Enum.uniq()
end end
define_vocab_terms unquote(lowercased_terms), unquote(base_iri) define_vocab_terms(unquote(lowercased_terms), unquote(base_iri))
@impl Elixir.RDF.Namespace @impl Elixir.RDF.Namespace
@dialyzer {:nowarn_function, __resolve_term__: 1} @dialyzer {:nowarn_function, __resolve_term__: 1}
@ -95,15 +98,16 @@ defmodule RDF.Vocabulary.Namespace do
nil -> nil ->
if @strict or MapSet.member?(@ignored_terms, term) do if @strict or MapSet.member?(@ignored_terms, term) do
{:error, {:error,
%Elixir.RDF.Namespace.UndefinedTermError{ %Elixir.RDF.Namespace.UndefinedTermError{
message: "undefined term #{term} in strict vocabulary #{__MODULE__}" message: "undefined term #{term} in strict vocabulary #{__MODULE__}"
} }}
}
else else
{:ok, term_to_iri(@base_iri, term)} {:ok, term_to_iri(@base_iri, term)}
end end
true -> true ->
{:ok, term_to_iri(@base_iri, term)} {:ok, term_to_iri(@base_iri, term)}
original_term -> original_term ->
{:ok, term_to_iri(@base_iri, original_term)} {:ok, term_to_iri(@base_iri, original_term)}
end end
@ -134,39 +138,43 @@ defmodule RDF.Vocabulary.Namespace do
defmacro define_vocab_terms(terms, base_iri) do defmacro define_vocab_terms(terms, base_iri) do
terms terms
|> Stream.filter(fn |> Stream.filter(fn
{term, true} -> valid_term?(term) {term, true} -> valid_term?(term)
{_, _} -> true {_, _} -> true
end) end)
|> Stream.map(fn |> Stream.map(fn
{term, true} -> {term, term} {term, true} -> {term, term}
{term, original_term} -> {term, original_term} {term, original_term} -> {term, original_term}
end) end)
|> Enum.map(fn {term, iri_suffix} -> |> Enum.map(fn {term, iri_suffix} ->
iri = term_to_iri(base_iri, iri_suffix) iri = term_to_iri(base_iri, iri_suffix)
quote do
@doc "<#{unquote(to_string(iri))}>"
def unquote(term)(), do: unquote(Macro.escape(iri))
@doc "`RDF.Description` builder for `#{unquote(term)}/0`" quote do
def unquote(term)(subject, object) do @doc "<#{unquote(to_string(iri))}>"
RDF.Description.new(subject, unquote(Macro.escape(iri)), object) def unquote(term)(), do: unquote(Macro.escape(iri))
end
# Is there a better way to support multiple objects via arguments? @doc "`RDF.Description` builder for `#{unquote(term)}/0`"
@doc false def unquote(term)(subject, object) do
def unquote(term)(subject, o1, o2), RDF.Description.new(subject, unquote(Macro.escape(iri)), object)
do: unquote(term)(subject, [o1, o2])
@doc false
def unquote(term)(subject, o1, o2, o3),
do: unquote(term)(subject, [o1, o2, o3])
@doc false
def unquote(term)(subject, o1, o2, o3, o4),
do: unquote(term)(subject, [o1, o2, o3, o4])
@doc false
def unquote(term)(subject, o1, o2, o3, o4, o5),
do: unquote(term)(subject, [o1, o2, o3, o4, o5])
end end
end)
# Is there a better way to support multiple objects via arguments?
@doc false
def unquote(term)(subject, o1, o2),
do: unquote(term)(subject, [o1, o2])
@doc false
def unquote(term)(subject, o1, o2, o3),
do: unquote(term)(subject, [o1, o2, o3])
@doc false
def unquote(term)(subject, o1, o2, o3, o4),
do: unquote(term)(subject, [o1, o2, o3, o4])
@doc false
def unquote(term)(subject, o1, o2, o3, o4, o5),
do: unquote(term)(subject, [o1, o2, o3, o4, o5])
end
end)
end end
defp strict?(opts), defp strict?(opts),
@ -174,9 +182,10 @@ defmodule RDF.Vocabulary.Namespace do
defp base_iri!(opts) do defp base_iri!(opts) do
base_iri = Keyword.fetch!(opts, :base_iri) base_iri = Keyword.fetch!(opts, :base_iri)
unless is_binary(base_iri) and String.ends_with?(base_iri, ["/", "#"]) do unless is_binary(base_iri) and String.ends_with?(base_iri, ["/", "#"]) do
raise RDF.Namespace.InvalidVocabBaseIRIError, raise RDF.Namespace.InvalidVocabBaseIRIError,
"a base_iri without a trailing '/' or '#' is invalid" "a base_iri without a trailing '/' or '#' is invalid"
else else
base_iri base_iri
end end
@ -184,9 +193,15 @@ defmodule RDF.Vocabulary.Namespace do
defp source!(opts) do defp source!(opts) do
cond do cond do
Keyword.has_key?(opts, :file) -> {:data, filename!(opts) |> RDF.read_file!()} Keyword.has_key?(opts, :file) ->
rdf_data = Keyword.get(opts, :data) -> {:data, raw_rdf_data(rdf_data)} {:data, filename!(opts) |> RDF.read_file!()}
terms = Keyword.get(opts, :terms) -> {:terms, terms_from_user_input!(terms)}
rdf_data = Keyword.get(opts, :data) ->
{:data, raw_rdf_data(rdf_data)}
terms = Keyword.get(opts, :terms) ->
{:terms, terms_from_user_input!(terms)}
true -> true ->
raise KeyError, key: ~w[terms data file], term: opts raise KeyError, key: ~w[terms data file], term: opts
end end
@ -194,81 +209,89 @@ defmodule RDF.Vocabulary.Namespace do
defp terms_from_user_input!(terms) do defp terms_from_user_input!(terms) do
# TODO: find an alternative to Code.eval_quoted - We want to keep supporting terms given as sigils ... # TODO: find an alternative to Code.eval_quoted - We want to keep supporting terms given as sigils ...
{terms, _ } = Code.eval_quoted(terms, [], rdf_data_env()) {terms, _} = Code.eval_quoted(terms, [], rdf_data_env())
Enum.map terms, fn
term when is_atom(term) -> term Enum.map(terms, fn
term when is_binary(term) -> String.to_atom(term) term when is_atom(term) ->
term
term when is_binary(term) ->
String.to_atom(term)
term -> term ->
raise RDF.Namespace.InvalidTermError, raise RDF.Namespace.InvalidTermError,
"'#{term}' is not a valid vocabulary term" "'#{term}' is not a valid vocabulary term"
end end)
end end
defp raw_rdf_data(%RDF.Description{} = rdf_data), do: rdf_data defp raw_rdf_data(%RDF.Description{} = rdf_data), do: rdf_data
defp raw_rdf_data(%RDF.Graph{} = rdf_data), do: rdf_data defp raw_rdf_data(%RDF.Graph{} = rdf_data), do: rdf_data
defp raw_rdf_data(%RDF.Dataset{} = rdf_data), do: rdf_data defp raw_rdf_data(%RDF.Dataset{} = rdf_data), do: rdf_data
defp raw_rdf_data(rdf_data) do defp raw_rdf_data(rdf_data) do
# TODO: find an alternative to Code.eval_quoted # TODO: find an alternative to Code.eval_quoted
{rdf_data, _} = Code.eval_quoted(rdf_data, [], rdf_data_env()) {rdf_data, _} = Code.eval_quoted(rdf_data, [], rdf_data_env())
rdf_data rdf_data
end end
defp ignored_terms!(opts) do defp ignored_terms!(opts) do
# TODO: find an alternative to Code.eval_quoted - We want to keep supporting terms given as sigils ... # TODO: find an alternative to Code.eval_quoted - We want to keep supporting terms given as sigils ...
with terms = Keyword.get(opts, :ignore, []) do with terms = Keyword.get(opts, :ignore, []) do
{terms, _ } = Code.eval_quoted(terms, [], rdf_data_env()) {terms, _} = Code.eval_quoted(terms, [], rdf_data_env())
terms terms
|> Enum.map(fn |> Enum.map(fn
term when is_atom(term) -> term term when is_atom(term) -> term
term when is_binary(term) -> String.to_atom(term) term when is_binary(term) -> String.to_atom(term)
term -> raise RDF.Namespace.InvalidTermError, inspect(term) term -> raise RDF.Namespace.InvalidTermError, inspect(term)
end) end)
|> MapSet.new |> MapSet.new()
end end
end end
defp term_mapping!(terms, opts) do defp term_mapping!(terms, opts) do
terms = Map.new terms, fn terms =
term when is_atom(term) -> {term, true} Map.new(terms, fn
term -> {String.to_atom(term), true} term when is_atom(term) -> {term, true}
end term -> {String.to_atom(term), true}
end)
Keyword.get(opts, :alias, []) Keyword.get(opts, :alias, [])
|> Enum.reduce(terms, fn {alias, original_term}, terms -> |> Enum.reduce(terms, fn {alias, original_term}, terms ->
term = String.to_atom(original_term) term = String.to_atom(original_term)
cond do
not valid_characters?(alias) ->
raise RDF.Namespace.InvalidAliasError,
"alias '#{alias}' contains invalid characters"
Map.get(terms, alias) == true -> cond do
raise RDF.Namespace.InvalidAliasError, not valid_characters?(alias) ->
"alias '#{alias}' already defined" raise RDF.Namespace.InvalidAliasError,
"alias '#{alias}' contains invalid characters"
strict?(opts) and not Map.has_key?(terms, term) -> Map.get(terms, alias) == true ->
raise RDF.Namespace.InvalidAliasError, raise RDF.Namespace.InvalidAliasError,
"alias '#{alias}' already defined"
strict?(opts) and not Map.has_key?(terms, term) ->
raise RDF.Namespace.InvalidAliasError,
"term '#{original_term}' is not a term in this vocabulary" "term '#{original_term}' is not a term in this vocabulary"
Map.get(terms, term, true) != true -> Map.get(terms, term, true) != true ->
raise RDF.Namespace.InvalidAliasError, raise RDF.Namespace.InvalidAliasError,
"'#{original_term}' is already an alias" "'#{original_term}' is already an alias"
true -> true ->
Map.put(terms, alias, to_string(original_term)) Map.put(terms, alias, to_string(original_term))
end end
end) end)
end end
defp aliased_terms(terms) do defp aliased_terms(terms) do
terms terms
|> Map.values |> Map.values()
|> MapSet.new |> MapSet.new()
|> MapSet.delete(true) |> MapSet.delete(true)
|> Enum.map(&String.to_atom/1) |> Enum.map(&String.to_atom/1)
end end
@invalid_terms MapSet.new ~w[ @invalid_terms MapSet.new(~w[
and and
or or
xor xor
@ -288,7 +311,7 @@ defmodule RDF.Vocabulary.Namespace do
require require
super super
__aliases__ __aliases__
]a ]a)
def invalid_terms, do: @invalid_terms def invalid_terms, do: @invalid_terms
@ -309,18 +332,17 @@ defmodule RDF.Vocabulary.Namespace do
defp handle_invalid_terms!(invalid_terms) do defp handle_invalid_terms!(invalid_terms) do
raise RDF.Namespace.InvalidTermError, """ raise RDF.Namespace.InvalidTermError, """
The following terms cannot be used, because they conflict with Elixir semantics: The following terms cannot be used, because they conflict with Elixir semantics:
- #{Enum.join(invalid_terms, "\n- ")} - #{Enum.join(invalid_terms, "\n- ")}
You have the following options: You have the following options:
- define an alias with the :alias option on defvocab - define an alias with the :alias option on defvocab
- ignore the resource with the :ignore option on defvocab - ignore the resource with the :ignore option on defvocab
""" """
end end
defp validate_characters!(terms, opts) do defp validate_characters!(terms, opts) do
if (handling = Keyword.get(opts, :invalid_characters, :fail)) == :ignore do if (handling = Keyword.get(opts, :invalid_characters, :fail)) == :ignore do
terms terms
@ -333,8 +355,7 @@ defmodule RDF.Vocabulary.Namespace do
defp detect_invalid_characters(terms) do defp detect_invalid_characters(terms) do
with aliased_terms = aliased_terms(terms) do with aliased_terms = aliased_terms(terms) do
for {term, _} <- terms, term not in aliased_terms and not valid_characters?(term), for {term, _} <- terms, term not in aliased_terms and not valid_characters?(term), do: term
do: term
end end
end end
@ -342,32 +363,35 @@ defmodule RDF.Vocabulary.Namespace do
defp handle_invalid_characters(invalid_terms, :fail, _) do defp handle_invalid_characters(invalid_terms, :fail, _) do
raise RDF.Namespace.InvalidTermError, """ raise RDF.Namespace.InvalidTermError, """
The following terms contain invalid characters: The following terms contain invalid characters:
- #{Enum.join(invalid_terms, "\n- ")} - #{Enum.join(invalid_terms, "\n- ")}
You have the following options: You have the following options:
- if you are in control of the vocabulary, consider renaming the resource - if you are in control of the vocabulary, consider renaming the resource
- define an alias with the :alias option on defvocab - define an alias with the :alias option on defvocab
- change the handling of invalid characters with the :invalid_characters option on defvocab - change the handling of invalid characters with the :invalid_characters option on defvocab
- ignore the resource with the :ignore option on defvocab - ignore the resource with the :ignore option on defvocab
""" """
end end
defp handle_invalid_characters(invalid_terms, :warn, terms) do defp handle_invalid_characters(invalid_terms, :warn, terms) do
Enum.each invalid_terms, fn term -> Enum.each(invalid_terms, fn term ->
IO.warn "'#{term}' is not valid term, since it contains invalid characters" IO.warn("'#{term}' is not valid term, since it contains invalid characters")
end end)
terms terms
end end
defp valid_characters?(term) when is_atom(term), defp valid_characters?(term) when is_atom(term),
do: valid_characters?(Atom.to_string(term)) do: valid_characters?(Atom.to_string(term))
defp valid_characters?(term), defp valid_characters?(term),
do: Regex.match?(~r/^[a-zA-Z_]\w*$/, term) do: Regex.match?(~r/^[a-zA-Z_]\w*$/, term)
defp validate_case!(terms, nil, _, _), do: terms defp validate_case!(terms, nil, _, _), do: terms
defp validate_case!(terms, data, base_iri, opts) do defp validate_case!(terms, data, base_iri, opts) do
if (handling = Keyword.get(opts, :case_violations, :warn)) == :ignore do if (handling = Keyword.get(opts, :case_violations, :warn)) == :ignore do
terms terms
@ -381,40 +405,43 @@ defmodule RDF.Vocabulary.Namespace do
defp detect_case_violations(terms, data, base_iri) do defp detect_case_violations(terms, data, base_iri) do
aliased_terms = aliased_terms(terms) aliased_terms = aliased_terms(terms)
terms terms
|> Enum.filter(fn {term, _} -> |> Enum.filter(fn {term, _} ->
not(Atom.to_string(term) |> String.starts_with?("_")) not (Atom.to_string(term) |> String.starts_with?("_"))
end) end)
|> Enum.filter(fn |> Enum.filter(fn
{term, true} -> {term, true} ->
if term not in aliased_terms do if term not in aliased_terms do
proper_case?(term, base_iri, Atom.to_string(term), data) proper_case?(term, base_iri, Atom.to_string(term), data)
end end
{term, original_term} ->
proper_case?(term, base_iri, original_term, data) {term, original_term} ->
end) proper_case?(term, base_iri, original_term, data)
end)
end end
defp proper_case?(term, base_iri, iri_suffix, data) do defp proper_case?(term, base_iri, iri_suffix, data) do
case ResourceClassifier.property?(term_to_iri(base_iri, iri_suffix), data) do case ResourceClassifier.property?(term_to_iri(base_iri, iri_suffix), data) do
true -> not lowercase?(term) true -> not lowercase?(term)
false -> lowercase?(term) false -> lowercase?(term)
nil -> lowercase?(term) nil -> lowercase?(term)
end end
end end
defp group_case_violations(violations) do defp group_case_violations(violations) do
violations violations
|> Enum.group_by(fn |> Enum.group_by(fn
{term, true} -> {term, true} ->
if lowercase?(term), if lowercase?(term),
do: :lowercased_term, do: :lowercased_term,
else: :capitalized_term else: :capitalized_term
{term, _original} ->
if lowercase?(term), {term, _original} ->
do: :lowercased_alias, if lowercase?(term),
else: :capitalized_alias do: :lowercased_alias,
end) else: :capitalized_alias
end)
end end
defp handle_case_violations(%{} = violations, _, terms, _, _) when map_size(violations) == 0, defp handle_case_violations(%{} = violations, _, terms, _, _) when map_size(violations) == 0,
@ -427,101 +454,106 @@ defmodule RDF.Vocabulary.Namespace do
|> Enum.map(&to_string/1) |> Enum.map(&to_string/1)
|> Enum.join("\n- ") |> Enum.join("\n- ")
end end
alias_violations = fn violations -> alias_violations = fn violations ->
violations violations
|> Enum.map(fn {term, original} -> |> Enum.map(fn {term, original} ->
"alias #{term} for #{term_to_iri(base_iri, original)}" "alias #{term} for #{term_to_iri(base_iri, original)}"
end) end)
|> Enum.join("\n- ") |> Enum.join("\n- ")
end end
violation_error_lines = violation_error_lines =
violations violations
|> Enum.map(fn |> Enum.map(fn
{:capitalized_term, violations} -> {:capitalized_term, violations} ->
""" """
Terms for properties should be lowercased, but the following properties are Terms for properties should be lowercased, but the following properties are
capitalized: capitalized:
- #{resource_name_violations.(violations)} - #{resource_name_violations.(violations)}
""" """
{:lowercased_term, violations} ->
"""
Terms for non-property resources should be capitalized, but the following
non-properties are lowercased:
- #{resource_name_violations.(violations)} {:lowercased_term, violations} ->
"""
Terms for non-property resources should be capitalized, but the following
non-properties are lowercased:
""" - #{resource_name_violations.(violations)}
{:capitalized_alias, violations} ->
"""
Terms for properties should be lowercased, but the following aliases for
properties are capitalized:
- #{alias_violations.(violations)} """
""" {:capitalized_alias, violations} ->
{:lowercased_alias, violations} -> """
""" Terms for properties should be lowercased, but the following aliases for
Terms for non-property resources should be capitalized, but the following properties are capitalized:
aliases for non-properties are lowercased:
- #{alias_violations.(violations)} - #{alias_violations.(violations)}
""" """
end)
|> Enum.join {:lowercased_alias, violations} ->
"""
Terms for non-property resources should be capitalized, but the following
aliases for non-properties are lowercased:
- #{alias_violations.(violations)}
"""
end)
|> Enum.join()
raise RDF.Namespace.InvalidTermError, """ raise RDF.Namespace.InvalidTermError, """
Case violations detected Case violations detected
#{violation_error_lines} #{violation_error_lines}
You have the following options: You have the following options:
- if you are in control of the vocabulary, consider renaming the resource - if you are in control of the vocabulary, consider renaming the resource
- define a properly cased alias with the :alias option on defvocab - define a properly cased alias with the :alias option on defvocab
- change the handling of case violations with the :case_violations option on defvocab - change the handling of case violations with the :case_violations option on defvocab
- ignore the resource with the :ignore option on defvocab - ignore the resource with the :ignore option on defvocab
""" """
end end
defp handle_case_violations(violations, :warn, terms, base_iri, _) do defp handle_case_violations(violations, :warn, terms, base_iri, _) do
for {type, violations} <- violations, for {type, violations} <- violations,
{term, original} <- violations do {term, original} <- violations do
case_violation_warning(type, term, original, base_iri) case_violation_warning(type, term, original, base_iri)
end end
terms terms
end end
defp case_violation_warning(:capitalized_term, term, _, base_iri) do defp case_violation_warning(:capitalized_term, term, _, base_iri) do
IO.warn "'#{term_to_iri(base_iri, term)}' is a capitalized property" IO.warn("'#{term_to_iri(base_iri, term)}' is a capitalized property")
end end
defp case_violation_warning(:lowercased_term, term, _, base_iri) do defp case_violation_warning(:lowercased_term, term, _, base_iri) do
IO.warn "'#{term_to_iri(base_iri, term)}' is a lowercased non-property resource" IO.warn("'#{term_to_iri(base_iri, term)}' is a lowercased non-property resource")
end end
defp case_violation_warning(:capitalized_alias, term, _, _) do defp case_violation_warning(:capitalized_alias, term, _, _) do
IO.warn "capitalized alias '#{term}' for a property" IO.warn("capitalized alias '#{term}' for a property")
end end
defp case_violation_warning(:lowercased_alias, term, _, _) do defp case_violation_warning(:lowercased_alias, term, _, _) do
IO.warn "lowercased alias '#{term}' for a non-property resource" IO.warn("lowercased alias '#{term}' for a non-property resource")
end end
defp filename!(opts) do defp filename!(opts) do
if filename = Keyword.get(opts, :file) do if filename = Keyword.get(opts, :file) do
cond do cond do
File.exists?(filename) -> File.exists?(filename) ->
filename filename
File.exists?(expanded_filename = Path.expand(filename, @vocabs_dir)) -> File.exists?(expanded_filename = Path.expand(filename, @vocabs_dir)) ->
expanded_filename expanded_filename
true -> true ->
raise File.Error, path: filename, action: "find", reason: :enoent raise File.Error, path: filename, action: "find", reason: :enoent
end end
end end
end end
@ -532,13 +564,13 @@ defmodule RDF.Vocabulary.Namespace do
defp rdf_data_vocab_terms(data, base_iri) do defp rdf_data_vocab_terms(data, base_iri) do
data data
|> RDF.Data.resources |> RDF.Data.resources()
|> Stream.filter(fn |> Stream.filter(fn
%RDF.IRI{} -> true %RDF.IRI{} -> true
_ -> false _ -> false
end) end)
|> Stream.map(&to_string/1) |> Stream.map(&to_string/1)
|> Stream.map(&(strip_base_iri(&1, base_iri))) |> Stream.map(&strip_base_iri(&1, base_iri))
|> Stream.filter(&vocab_term?/1) |> Stream.filter(&vocab_term?/1)
|> Enum.map(&String.to_atom/1) |> Enum.map(&String.to_atom/1)
end end
@ -546,17 +578,18 @@ defmodule RDF.Vocabulary.Namespace do
defp group_terms_by_case(terms) do defp group_terms_by_case(terms) do
terms terms
|> Enum.group_by(fn {term, _} -> |> Enum.group_by(fn {term, _} ->
if lowercase?(term), if lowercase?(term),
do: :lowercased, do: :lowercased,
else: :capitalized else: :capitalized
end) end)
|> Map.new(fn {group, term_mapping} -> |> Map.new(fn {group, term_mapping} ->
{group, Map.new(term_mapping)} {group, Map.new(term_mapping)}
end) end)
end end
defp lowercase?(term) when is_atom(term), defp lowercase?(term) when is_atom(term),
do: Atom.to_string(term) |> lowercase? do: Atom.to_string(term) |> lowercase?
defp lowercase?(term), defp lowercase?(term),
do: term =~ ~r/^(_|\p{Ll})/u do: term =~ ~r/^(_|\p{Ll})/u
@ -567,15 +600,18 @@ defmodule RDF.Vocabulary.Namespace do
end end
defp vocab_term?(""), do: false defp vocab_term?(""), do: false
defp vocab_term?(term) when is_binary(term) do defp vocab_term?(term) when is_binary(term) do
not String.contains?(term, "/") not String.contains?(term, "/")
end end
defp vocab_term?(_), do: false defp vocab_term?(_), do: false
@doc false @doc false
@spec term_to_iri(String.t, String.t | atom) :: RDF.IRI.t @spec term_to_iri(String.t(), String.t() | atom) :: RDF.IRI.t()
def term_to_iri(base_iri, term) when is_atom(term), def term_to_iri(base_iri, term) when is_atom(term),
do: term_to_iri(base_iri, Atom.to_string(term)) do: term_to_iri(base_iri, Atom.to_string(term))
def term_to_iri(base_iri, term), def term_to_iri(base_iri, term),
do: RDF.iri(base_iri <> term) do: RDF.iri(base_iri <> term)
@ -587,5 +623,4 @@ defmodule RDF.Vocabulary.Namespace do
_ -> false _ -> false
end end
end end
end end
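The `define_vocab_terms` block above generates, for every lowercased term, a 0-arity IRI accessor plus `RDF.Description` builders taking up to five objects; with a hypothetical vocabulary module EX defining a `name` term this looks roughly like:

iex> EX.name()                      # the IRI of the term
iex> EX.name(EX.S, "Alice")         # an RDF.Description of EX.S
iex> EX.name(EX.S, "Alice", "Ali")  # multiple objects via the generated higher arities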

View File

@ -49,6 +49,7 @@ defmodule RDF.XSD do
defdelegate unquote(String.to_atom(datatype.name))(value, opts), to: datatype, as: :new defdelegate unquote(String.to_atom(datatype.name))(value, opts), to: datatype, as: :new
elixir_name = Macro.underscore(datatype.name) elixir_name = Macro.underscore(datatype.name)
unless datatype.name == elixir_name do unless datatype.name == elixir_name do
defdelegate unquote(String.to_atom(elixir_name))(value), to: datatype, as: :new defdelegate unquote(String.to_atom(elixir_name))(value), to: datatype, as: :new
defdelegate unquote(String.to_atom(elixir_name))(value, opts), to: datatype, as: :new defdelegate unquote(String.to_atom(elixir_name))(value, opts), to: datatype, as: :new
@ -58,6 +59,6 @@ defmodule RDF.XSD do
defdelegate datetime(value), to: XSD.DateTime, as: :new defdelegate datetime(value), to: XSD.DateTime, as: :new
defdelegate datetime(value, opts), to: XSD.DateTime, as: :new defdelegate datetime(value, opts), to: XSD.DateTime, as: :new
defdelegate unquote(true)(), to: XSD.Boolean.Value defdelegate unquote(true)(), to: XSD.Boolean.Value
defdelegate unquote(false)(), to: XSD.Boolean.Value defdelegate unquote(false)(), to: XSD.Boolean.Value
end end
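The delegations above expose each XSD datatype constructor under its datatype name (and an underscored alias where the two differ), plus the two precompiled boolean literals; a sketch:

iex> RDF.XSD.integer(42)     # delegates to RDF.XSD.Integer.new/1
iex> RDF.XSD.string("foo")   # delegates to RDF.XSD.String.new/1
iex> RDF.XSD.true()          # the boolean value literal from RDF.XSD.Boolean.Value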

View File

@ -174,6 +174,7 @@ defmodule RDF.XSD.Datatype do
@doc false @doc false
def most_specific(left, right) def most_specific(left, right)
def most_specific(datatype, datatype), do: datatype def most_specific(datatype, datatype), do: datatype
def most_specific(left, right) do def most_specific(left, right) do
cond do cond do
left.datatype?(right) -> right left.datatype?(right) -> right
@ -182,7 +183,6 @@ defmodule RDF.XSD.Datatype do
end end
end end
defmacro __using__(opts) do defmacro __using__(opts) do
quote do quote do
defstruct [:value, :uncanonical_lexical] defstruct [:value, :uncanonical_lexical]
@ -201,10 +201,10 @@ defmodule RDF.XSD.Datatype do
} }
@doc !""" @doc !"""
This function is just used to check if a module is a RDF.XSD.Datatype. This function is just used to check if a module is a RDF.XSD.Datatype.
See `RDF.Literal.Datatype.Registry.is_xsd_datatype?/1`. See `RDF.Literal.Datatype.Registry.is_xsd_datatype?/1`.
""" """
def __xsd_datatype_indicator__, do: true def __xsd_datatype_indicator__, do: true
@doc """ @doc """
@ -214,19 +214,23 @@ defmodule RDF.XSD.Datatype do
def datatype?(%RDF.Literal{literal: literal}), do: datatype?(literal) def datatype?(%RDF.Literal{literal: literal}), do: datatype?(literal)
def datatype?(%datatype{}), do: datatype?(datatype) def datatype?(%datatype{}), do: datatype?(datatype)
def datatype?(__MODULE__), do: true def datatype?(__MODULE__), do: true
def datatype?(datatype) when maybe_module(datatype) do def datatype?(datatype) when maybe_module(datatype) do
RDF.XSD.datatype?(datatype) and datatype.derived_from?(__MODULE__) RDF.XSD.datatype?(datatype) and datatype.derived_from?(__MODULE__)
end end
def datatype?(_), do: false def datatype?(_), do: false
@doc false @doc false
def datatype!(%__MODULE__{}), do: true def datatype!(%__MODULE__{}), do: true
def datatype!((%datatype{} = literal)) do
def datatype!(%datatype{} = literal) do
datatype?(datatype) || datatype?(datatype) ||
raise RDF.XSD.Datatype.Mismatch, value: literal, expected_type: __MODULE__ raise RDF.XSD.Datatype.Mismatch, value: literal, expected_type: __MODULE__
end end
def datatype!(value), def datatype!(value),
do: raise RDF.XSD.Datatype.Mismatch, value: value, expected_type: __MODULE__ do: raise(RDF.XSD.Datatype.Mismatch, value: value, expected_type: __MODULE__)
@doc """ @doc """
Creates a new `RDF.Literal` with this datatype and the given `value`. Creates a new `RDF.Literal` with this datatype and the given `value`.
@ -288,7 +292,8 @@ defmodule RDF.XSD.Datatype do
end end
@doc false @doc false
@spec build_valid(any, RDF.XSD.Datatype.uncanonical_lexical(), Keyword.t()) :: RDF.Literal.t() @spec build_valid(any, RDF.XSD.Datatype.uncanonical_lexical(), Keyword.t()) ::
RDF.Literal.t()
def build_valid(value, lexical, opts) do def build_valid(value, lexical, opts) do
if Keyword.get(opts, :canonicalize) do if Keyword.get(opts, :canonicalize) do
literal(%__MODULE__{value: value}) literal(%__MODULE__{value: value})
@ -310,7 +315,6 @@ defmodule RDF.XSD.Datatype do
literal(%__MODULE__{uncanonical_lexical: init_invalid_lexical(lexical, opts)}) literal(%__MODULE__{uncanonical_lexical: init_invalid_lexical(lexical, opts)})
end end
@doc """ @doc """
Returns the value of a `RDF.Literal` of this or a derived datatype. Returns the value of a `RDF.Literal` of this or a derived datatype.
""" """
@ -342,7 +346,10 @@ defmodule RDF.XSD.Datatype do
""" """
@impl RDF.Literal.Datatype @impl RDF.Literal.Datatype
def canonical_lexical(%RDF.Literal{literal: literal}), do: canonical_lexical(literal) def canonical_lexical(%RDF.Literal{literal: literal}), do: canonical_lexical(literal)
def canonical_lexical(%__MODULE__{value: value}) when not is_nil(value), do: canonical_mapping(value)
def canonical_lexical(%__MODULE__{value: value}) when not is_nil(value),
do: canonical_mapping(value)
def canonical_lexical(_), do: nil def canonical_lexical(_), do: nil
@doc """ @doc """
@ -380,13 +387,16 @@ defmodule RDF.XSD.Datatype do
def valid?(%RDF.Literal{literal: literal}), do: valid?(literal) def valid?(%RDF.Literal{literal: literal}), do: valid?(literal)
def valid?(%__MODULE__{value: @invalid_value}), do: false def valid?(%__MODULE__{value: @invalid_value}), do: false
def valid?(%__MODULE__{}), do: true def valid?(%__MODULE__{}), do: true
def valid?((%datatype{} = literal)),
def valid?(%datatype{} = literal),
do: datatype?(datatype) and datatype.valid?(literal) do: datatype?(datatype) and datatype.valid?(literal)
def valid?(_), do: false def valid?(_), do: false
@doc false @doc false
defp equality_path(left_datatype, right_datatype) defp equality_path(left_datatype, right_datatype)
defp equality_path(datatype, datatype), do: {:same_or_derived, datatype} defp equality_path(datatype, datatype), do: {:same_or_derived, datatype}
defp equality_path(left_datatype, right_datatype) do defp equality_path(left_datatype, right_datatype) do
if RDF.XSD.datatype?(left_datatype) and RDF.XSD.datatype?(right_datatype) do if RDF.XSD.datatype?(left_datatype) and RDF.XSD.datatype?(right_datatype) do
if datatype = RDF.XSD.Datatype.most_specific(left_datatype, right_datatype) do if datatype = RDF.XSD.Datatype.most_specific(left_datatype, right_datatype) do
@ -399,7 +409,6 @@ defmodule RDF.XSD.Datatype do
end end
end end
@doc """ @doc """
Compares two `RDF.Literal`s. Compares two `RDF.Literal`s.
@ -409,14 +418,15 @@ defmodule RDF.XSD.Datatype do
due to their datatype, or `:indeterminate` is returned, when the order of the given values is due to their datatype, or `:indeterminate` is returned, when the order of the given values is
not defined on only partially ordered datatypes. not defined on only partially ordered datatypes.
""" """
@spec compare(RDF.Literal.t() | any, RDF.Literal.t() | any) :: RDF.Literal.Datatype.comparison_result | :indeterminate | nil @spec compare(RDF.Literal.t() | any, RDF.Literal.t() | any) ::
RDF.Literal.Datatype.comparison_result() | :indeterminate | nil
def compare(left, right) def compare(left, right)
def compare(left, %RDF.Literal{literal: right}), do: compare(left, right) def compare(left, %RDF.Literal{literal: right}), do: compare(left, right)
def compare(%RDF.Literal{literal: left}, right), do: compare(left, right) def compare(%RDF.Literal{literal: left}, right), do: compare(left, right)
def compare(left, right) do def compare(left, right) do
if RDF.XSD.datatype?(left) and RDF.XSD.datatype?(right) and if RDF.XSD.datatype?(left) and RDF.XSD.datatype?(right) and
RDF.Literal.Datatype.valid?(left) and RDF.Literal.Datatype.valid?(right) do RDF.Literal.Datatype.valid?(left) and RDF.Literal.Datatype.valid?(right) do
do_compare(left, right) do_compare(left, right)
end end
end end
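A quick sketch of the generated `compare/2`, which only yields an ordering for valid literals of comparable XSD datatypes and should give:

iex> RDF.XSD.Integer.compare(RDF.XSD.integer(1), RDF.XSD.integer(2))
:lt
iex> RDF.XSD.Integer.compare(RDF.XSD.integer(2), RDF.XSD.integer(2))
:eq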

View File

@ -58,25 +58,33 @@ defmodule RDF.XSD.Datatype.Primitive do
@impl RDF.Literal.Datatype @impl RDF.Literal.Datatype
def do_cast(value) do def do_cast(value) do
if datatype?(value) do # i.e. derived datatype # i.e. derived datatype
if datatype?(value) do
build_valid(value.value, value.uncanonical_lexical, []) build_valid(value.value, value.uncanonical_lexical, [])
end end
end end
@impl RDF.Literal.Datatype @impl RDF.Literal.Datatype
def do_equal_value_same_or_derived_datatypes?(%left_datatype{} = left, %right_datatype{} = right) do def do_equal_value_same_or_derived_datatypes?(
%left_datatype{} = left,
%right_datatype{} = right
) do
left_datatype.value(left) == right_datatype.value(right) left_datatype.value(left) == right_datatype.value(right)
end end
@impl RDF.Literal.Datatype @impl RDF.Literal.Datatype
def do_equal_value_different_datatypes?(left, right), do: nil def do_equal_value_different_datatypes?(left, right), do: nil
@impl RDF.Literal.Datatype @impl RDF.Literal.Datatype
def do_compare(%left_datatype{} = left, %right_datatype{} = right) do def do_compare(%left_datatype{} = left, %right_datatype{} = right) do
if left_datatype.datatype?(right_datatype) or right_datatype.datatype?(left_datatype) do if left_datatype.datatype?(right_datatype) or right_datatype.datatype?(left_datatype) do
case {left_datatype.value(left), right_datatype.value(right)} do case {left_datatype.value(left), right_datatype.value(right)} do
{left_value, right_value} when left_value < right_value -> :lt {left_value, right_value} when left_value < right_value ->
{left_value, right_value} when left_value > right_value -> :gt :lt
{left_value, right_value} when left_value > right_value ->
:gt
_ -> _ ->
if left_datatype.equal_value?(left, right), do: :eq if left_datatype.equal_value?(left, right), do: :eq
end end

View File

@ -15,7 +15,6 @@ defmodule RDF.XSD.AnyURI do
import RDF.Guards import RDF.Guards
def_applicable_facet XSD.Facets.MinLength def_applicable_facet XSD.Facets.MinLength
def_applicable_facet XSD.Facets.MaxLength def_applicable_facet XSD.Facets.MaxLength
def_applicable_facet XSD.Facets.Length def_applicable_facet XSD.Facets.Length
@ -41,7 +40,6 @@ defmodule RDF.XSD.AnyURI do
XSD.Facets.Pattern.conform?(pattern, lexical) XSD.Facets.Pattern.conform?(pattern, lexical)
end end
@impl XSD.Datatype @impl XSD.Datatype
@spec lexical_mapping(String.t(), Keyword.t()) :: valid_value @spec lexical_mapping(String.t(), Keyword.t()) :: valid_value
def lexical_mapping(lexical, _), do: URI.parse(lexical) def lexical_mapping(lexical, _), do: URI.parse(lexical)

View File

@ -12,7 +12,6 @@ defmodule RDF.XSD.Boolean do
alias RDF.XSD alias RDF.XSD
def_applicable_facet XSD.Facets.Pattern def_applicable_facet XSD.Facets.Pattern
@doc false @doc false
@ -20,7 +19,6 @@ defmodule RDF.XSD.Boolean do
XSD.Facets.Pattern.conform?(pattern, lexical) XSD.Facets.Pattern.conform?(pattern, lexical)
end end
@impl XSD.Datatype @impl XSD.Datatype
def lexical_mapping(lexical, _) do def lexical_mapping(lexical, _) do
with lexical do with lexical do
@ -142,6 +140,7 @@ defmodule RDF.XSD.Boolean do
@spec fn_not(input_value) :: t() | nil @spec fn_not(input_value) :: t() | nil
def fn_not(value) def fn_not(value)
def fn_not(%RDF.Literal{literal: literal}), do: fn_not(literal) def fn_not(%RDF.Literal{literal: literal}), do: fn_not(literal)
def fn_not(value) do def fn_not(value) do
case ebv(value) do case ebv(value) do
%RDF.Literal{literal: %__MODULE__{value: true}} -> XSD.Boolean.Value.false() %RDF.Literal{literal: %__MODULE__{value: true}} -> XSD.Boolean.Value.false()
@ -177,6 +176,7 @@ defmodule RDF.XSD.Boolean do
def logical_and(left, right) def logical_and(left, right)
def logical_and(%RDF.Literal{literal: left}, right), do: logical_and(left, right) def logical_and(%RDF.Literal{literal: left}, right), do: logical_and(left, right)
def logical_and(left, %RDF.Literal{literal: right}), do: logical_and(left, right) def logical_and(left, %RDF.Literal{literal: right}), do: logical_and(left, right)
def logical_and(left, right) do def logical_and(left, right) do
case ebv(left) do case ebv(left) do
%RDF.Literal{literal: %__MODULE__{value: false}} -> %RDF.Literal{literal: %__MODULE__{value: false}} ->
@ -223,6 +223,7 @@ defmodule RDF.XSD.Boolean do
def logical_or(left, right) def logical_or(left, right)
def logical_or(%RDF.Literal{literal: left}, right), do: logical_or(left, right) def logical_or(%RDF.Literal{literal: left}, right), do: logical_or(left, right)
def logical_or(left, %RDF.Literal{literal: right}), do: logical_or(left, right) def logical_or(left, %RDF.Literal{literal: right}), do: logical_or(left, right)
def logical_or(left, right) do def logical_or(left, right) do
case ebv(left) do case ebv(left) do
%RDF.Literal{literal: %__MODULE__{value: true}} -> %RDF.Literal{literal: %__MODULE__{value: true}} ->
View File
@ -1,14 +1,14 @@
defmodule RDF.XSD.Boolean.Value do defmodule RDF.XSD.Boolean.Value do
@moduledoc !""" @moduledoc !"""
This module holds the two boolean value literals, so they can be accessed This module holds the two boolean value literals, so they can be accessed
directly without needing to construct them every time. directly without needing to construct them every time.
They can't be defined in the RDF.XSD.Boolean module, because we can not use They can't be defined in the RDF.XSD.Boolean module, because we can not use
the `RDF.XSD.Boolean.new` function without having it compiled first. the `RDF.XSD.Boolean.new` function without having it compiled first.
""" """
@xsd_true RDF.XSD.Boolean.new(true) @xsd_true RDF.XSD.Boolean.new(true)
@xsd_false RDF.XSD.Boolean.new(false) @xsd_false RDF.XSD.Boolean.new(false)
def unquote(:true)(), do: @xsd_true def unquote(true)(), do: @xsd_true
def unquote(:false)(), do: @xsd_false def unquote(false)(), do: @xsd_false
end end
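The moduledoc above explains why these two literals are precompiled: they are built once at compile time and handed out on every call. A minimal usage sketch, relying only on the definitions shown here:

# Both calls return the literal structs stored in the module attributes at compile time.
RDF.XSD.Boolean.Value.true()
RDF.XSD.Boolean.Value.false()

# Presumably equal to a literal built at runtime, just without rebuilding it each time:
RDF.XSD.Boolean.Value.true() == RDF.XSD.Boolean.new(true)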
View File
@ -16,7 +16,6 @@ defmodule RDF.XSD.Date do
alias RDF.XSD alias RDF.XSD
def_applicable_facet XSD.Facets.ExplicitTimezone def_applicable_facet XSD.Facets.ExplicitTimezone
def_applicable_facet XSD.Facets.Pattern def_applicable_facet XSD.Facets.Pattern
@ -32,7 +31,6 @@ defmodule RDF.XSD.Date do
XSD.Facets.Pattern.conform?(pattern, lexical) XSD.Facets.Pattern.conform?(pattern, lexical)
end end
# TODO: Are GMT/UTC actually allowed? Maybe because it is supported by Elixir's Datetime ... # TODO: Are GMT/UTC actually allowed? Maybe because it is supported by Elixir's Datetime ...
@grammar ~r/\A(-?\d{4}-\d{2}-\d{2})((?:[\+\-]\d{2}:\d{2})|UTC|GMT|Z)?\Z/ @grammar ~r/\A(-?\d{4}-\d{2}-\d{2})((?:[\+\-]\d{2}:\d{2})|UTC|GMT|Z)?\Z/
@tz_grammar ~r/\A((?:[\+\-]\d{2}:\d{2})|UTC|GMT|Z)\Z/ @tz_grammar ~r/\A((?:[\+\-]\d{2}:\d{2})|UTC|GMT|Z)\Z/
@ -165,7 +163,6 @@ defmodule RDF.XSD.Date do
end end
end end
@impl RDF.Literal.Datatype @impl RDF.Literal.Datatype
def do_equal_value_same_or_derived_datatypes?(left, right) do def do_equal_value_same_or_derived_datatypes?(left, right) do
XSD.DateTime.equal_value?( XSD.DateTime.equal_value?(
View File
@ -11,7 +11,6 @@ defmodule RDF.XSD.DateTime do
alias RDF.XSD alias RDF.XSD
def_applicable_facet XSD.Facets.ExplicitTimezone def_applicable_facet XSD.Facets.ExplicitTimezone
def_applicable_facet XSD.Facets.Pattern def_applicable_facet XSD.Facets.Pattern
@ -27,7 +26,6 @@ defmodule RDF.XSD.DateTime do
XSD.Facets.Pattern.conform?(pattern, lexical) XSD.Facets.Pattern.conform?(pattern, lexical)
end end
@impl XSD.Datatype @impl XSD.Datatype
def lexical_mapping(lexical, opts) do def lexical_mapping(lexical, opts) do
case DateTime.from_iso8601(lexical) do case DateTime.from_iso8601(lexical) do
@ -120,6 +118,7 @@ defmodule RDF.XSD.DateTime do
def tz(xsd_datetime) def tz(xsd_datetime)
def tz(%RDF.Literal{literal: xsd_datetime}), do: tz(xsd_datetime) def tz(%RDF.Literal{literal: xsd_datetime}), do: tz(xsd_datetime)
def tz(%__MODULE__{value: %NaiveDateTime{}}), do: "" def tz(%__MODULE__{value: %NaiveDateTime{}}), do: ""
def tz(date_time_literal) do def tz(date_time_literal) do
if valid?(date_time_literal) do if valid?(date_time_literal) do
date_time_literal date_time_literal
@ -134,6 +133,7 @@ defmodule RDF.XSD.DateTime do
@spec canonical_lexical_with_zone(RDF.Literal.t() | t()) :: String.t() | nil @spec canonical_lexical_with_zone(RDF.Literal.t() | t()) :: String.t() | nil
def canonical_lexical_with_zone(%RDF.Literal{literal: xsd_datetime}), def canonical_lexical_with_zone(%RDF.Literal{literal: xsd_datetime}),
do: canonical_lexical_with_zone(xsd_datetime) do: canonical_lexical_with_zone(xsd_datetime)
def canonical_lexical_with_zone(%__MODULE__{} = xsd_datetime) do def canonical_lexical_with_zone(%__MODULE__{} = xsd_datetime) do
case tz(xsd_datetime) do case tz(xsd_datetime) do
nil -> nil ->
View File
@ -12,7 +12,6 @@ defmodule RDF.XSD.Decimal do
alias RDF.XSD alias RDF.XSD
alias Elixir.Decimal, as: D alias Elixir.Decimal, as: D
def_applicable_facet XSD.Facets.MinInclusive def_applicable_facet XSD.Facets.MinInclusive
def_applicable_facet XSD.Facets.MaxInclusive def_applicable_facet XSD.Facets.MaxInclusive
def_applicable_facet XSD.Facets.MinExclusive def_applicable_facet XSD.Facets.MinExclusive
@ -56,7 +55,6 @@ defmodule RDF.XSD.Decimal do
XSD.Facets.Pattern.conform?(pattern, lexical) XSD.Facets.Pattern.conform?(pattern, lexical)
end end
@impl XSD.Datatype @impl XSD.Datatype
def lexical_mapping(lexical, opts) do def lexical_mapping(lexical, opts) do
if String.contains?(lexical, ~w[e E]) do if String.contains?(lexical, ~w[e E]) do
@ -136,7 +134,7 @@ defmodule RDF.XSD.Decimal do
XSD.Boolean.datatype?(literal) -> XSD.Boolean.datatype?(literal) ->
case literal.value do case literal.value do
false -> new(0.0) false -> new(0.0)
true -> new(1.0) true -> new(1.0)
end end
XSD.Integer.datatype?(literal) -> XSD.Integer.datatype?(literal) ->
@ -151,13 +149,13 @@ defmodule RDF.XSD.Decimal do
end end
end end
@impl RDF.Literal.Datatype
def do_equal_value_same_or_derived_datatypes?(left, right),
do: XSD.Numeric.do_equal_value?(left, right)
@impl RDF.Literal.Datatype @impl RDF.Literal.Datatype
def do_equal_value_same_or_derived_datatypes?(left, right), do: XSD.Numeric.do_equal_value?(left, right) def do_equal_value_different_datatypes?(left, right),
do: XSD.Numeric.do_equal_value?(left, right)
@impl RDF.Literal.Datatype
def do_equal_value_different_datatypes?(left, right), do: XSD.Numeric.do_equal_value?(left, right)
@impl RDF.Literal.Datatype @impl RDF.Literal.Datatype
def do_compare(left, right), do: XSD.Numeric.do_compare(left, right) def do_compare(left, right), do: XSD.Numeric.do_compare(left, right)
@ -179,7 +177,8 @@ defmodule RDF.XSD.Decimal do
|> datatype.canonical_lexical() |> datatype.canonical_lexical()
|> do_digit_count() |> do_digit_count()
true -> nil true ->
nil
end end
end end
@ -195,17 +194,21 @@ defmodule RDF.XSD.Decimal do
The number of digits to the right of the decimal point in the XML Schema canonical form of the literal value. The number of digits to the right of the decimal point in the XML Schema canonical form of the literal value.
""" """
@spec fraction_digit_count(RDF.Literal.t()) :: non_neg_integer | nil @spec fraction_digit_count(RDF.Literal.t()) :: non_neg_integer | nil
def fraction_digit_count(%RDF.Literal{literal: datatype_literal}), do: fraction_digit_count(datatype_literal) def fraction_digit_count(%RDF.Literal{literal: datatype_literal}),
do: fraction_digit_count(datatype_literal)
def fraction_digit_count(%datatype{} = literal) do def fraction_digit_count(%datatype{} = literal) do
cond do cond do
XSD.Integer.datatype?(literal) -> 0 XSD.Integer.datatype?(literal) ->
0
datatype?(literal) and datatype.valid?(literal) -> datatype?(literal) and datatype.valid?(literal) ->
literal literal
|> datatype.canonical_lexical() |> datatype.canonical_lexical()
|> do_fraction_digit_count() |> do_fraction_digit_count()
true -> nil true ->
nil
end end
end end
View File
@ -14,7 +14,6 @@ defmodule RDF.XSD.Double do
alias RDF.XSD alias RDF.XSD
def_applicable_facet XSD.Facets.MinInclusive def_applicable_facet XSD.Facets.MinInclusive
def_applicable_facet XSD.Facets.MaxInclusive def_applicable_facet XSD.Facets.MaxInclusive
def_applicable_facet XSD.Facets.MinExclusive def_applicable_facet XSD.Facets.MinExclusive
@ -46,7 +45,6 @@ defmodule RDF.XSD.Double do
XSD.Facets.Pattern.conform?(pattern, lexical) XSD.Facets.Pattern.conform?(pattern, lexical)
end end
@impl XSD.Datatype @impl XSD.Datatype
def lexical_mapping(lexical, opts) do def lexical_mapping(lexical, opts) do
case Float.parse(lexical) do case Float.parse(lexical) do
@ -143,7 +141,7 @@ defmodule RDF.XSD.Double do
XSD.Boolean.datatype?(literal) -> XSD.Boolean.datatype?(literal) ->
case literal.value do case literal.value do
false -> new(0.0) false -> new(0.0)
true -> new(1.0) true -> new(1.0)
end end
XSD.Integer.datatype?(literal) -> XSD.Integer.datatype?(literal) ->
@ -159,13 +157,13 @@ defmodule RDF.XSD.Double do
end end
end end
@impl RDF.Literal.Datatype
def do_equal_value_same_or_derived_datatypes?(left, right),
do: XSD.Numeric.do_equal_value?(left, right)
@impl RDF.Literal.Datatype @impl RDF.Literal.Datatype
def do_equal_value_same_or_derived_datatypes?(left, right), do: XSD.Numeric.do_equal_value?(left, right) def do_equal_value_different_datatypes?(left, right),
do: XSD.Numeric.do_equal_value?(left, right)
@impl RDF.Literal.Datatype
def do_equal_value_different_datatypes?(left, right), do: XSD.Numeric.do_equal_value?(left, right)
@impl RDF.Literal.Datatype @impl RDF.Literal.Datatype
def do_compare(left, right), do: XSD.Numeric.do_compare(left, right) def do_compare(left, right), do: XSD.Numeric.do_compare(left, right)
View File
@ -14,7 +14,6 @@ defmodule RDF.XSD.Integer do
alias RDF.XSD alias RDF.XSD
def_applicable_facet XSD.Facets.MinInclusive def_applicable_facet XSD.Facets.MinInclusive
def_applicable_facet XSD.Facets.MaxInclusive def_applicable_facet XSD.Facets.MaxInclusive
def_applicable_facet XSD.Facets.MinExclusive def_applicable_facet XSD.Facets.MinExclusive
@ -52,7 +51,6 @@ defmodule RDF.XSD.Integer do
XSD.Facets.Pattern.conform?(pattern, lexical) XSD.Facets.Pattern.conform?(pattern, lexical)
end end
@impl XSD.Datatype @impl XSD.Datatype
def lexical_mapping(lexical, _) do def lexical_mapping(lexical, _) do
case Integer.parse(lexical) do case Integer.parse(lexical) do
@ -80,7 +78,7 @@ defmodule RDF.XSD.Integer do
XSD.Boolean.datatype?(literal) -> XSD.Boolean.datatype?(literal) ->
case literal.value do case literal.value do
false -> new(0) false -> new(0)
true -> new(1) true -> new(1)
end end
XSD.Decimal.datatype?(literal) -> XSD.Decimal.datatype?(literal) ->
@ -89,7 +87,8 @@ defmodule RDF.XSD.Integer do
|> Decimal.to_integer() |> Decimal.to_integer()
|> new() |> new()
is_float(literal.value) and XSD.Double.datatype?(literal) -> # we're catching the XSD.Floats with this too # we're catching the XSD.Floats with this too
is_float(literal.value) and XSD.Double.datatype?(literal) ->
literal.value literal.value
|> trunc() |> trunc()
|> new() |> new()
@ -100,10 +99,12 @@ defmodule RDF.XSD.Integer do
end end
@impl RDF.Literal.Datatype @impl RDF.Literal.Datatype
def do_equal_value_same_or_derived_datatypes?(left, right), do: XSD.Numeric.do_equal_value?(left, right) def do_equal_value_same_or_derived_datatypes?(left, right),
do: XSD.Numeric.do_equal_value?(left, right)
@impl RDF.Literal.Datatype @impl RDF.Literal.Datatype
def do_equal_value_different_datatypes?(left, right), do: XSD.Numeric.do_equal_value?(left, right) def do_equal_value_different_datatypes?(left, right),
do: XSD.Numeric.do_equal_value?(left, right)
@impl RDF.Literal.Datatype @impl RDF.Literal.Datatype
def do_compare(left, right), do: XSD.Numeric.do_compare(left, right) def do_compare(left, right), do: XSD.Numeric.do_compare(left, right)
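The do_cast/1 clauses above turn booleans, decimals and float-valued doubles into integers. A rough sketch of the resulting behaviour; treating cast/1 as the public wrapper of this callback is an assumption here:

# Hypothetical calls, assuming cast/1 dispatches to the do_cast/1 clauses above.
RDF.XSD.Integer.cast(RDF.XSD.Boolean.new(true))  # booleans cast to 1 (false casts to 0)
RDF.XSD.Integer.cast(RDF.XSD.Double.new(3.9))    # float values are truncated with trunc/1, giving 3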
View File
@ -13,13 +13,13 @@ defmodule RDF.XSD.Numeric do
defdelegate datatype?(value), to: Literal.Datatype.Registry, as: :numeric_datatype? defdelegate datatype?(value), to: Literal.Datatype.Registry, as: :numeric_datatype?
@doc !""" @doc !"""
Tests for numeric value equality of two numeric XSD datatyped literals. Tests for numeric value equality of two numeric XSD datatyped literals.
see: see:
- <https://www.w3.org/TR/sparql11-query/#OperatorMapping> - <https://www.w3.org/TR/sparql11-query/#OperatorMapping>
- <https://www.w3.org/TR/xpath-functions/#func-numeric-equal> - <https://www.w3.org/TR/xpath-functions/#func-numeric-equal>
""" """
@spec do_equal_value?(t() | any, t() | any) :: boolean @spec do_equal_value?(t() | any, t() | any) :: boolean
def do_equal_value?(left, right) def do_equal_value?(left, right)
@ -52,14 +52,14 @@ defmodule RDF.XSD.Numeric do
defp new_decimal(value), do: D.new(value) defp new_decimal(value), do: D.new(value)
@doc !""" @doc !"""
Compares two numeric XSD literals. Compares two numeric XSD literals.
Returns `:gt` if first literal is greater than the second and `:lt` for vice Returns `:gt` if first literal is greater than the second and `:lt` for vice
versa. If the two literals are equal `:eq` is returned. versa. If the two literals are equal `:eq` is returned.
Returns `nil` when the given arguments are not comparable datatypes. Returns `nil` when the given arguments are not comparable datatypes.
""" """
@spec do_compare(t, t) :: Literal.Datatype.comparison_result() | nil @spec do_compare(t, t) :: Literal.Datatype.comparison_result() | nil
def do_compare(left, right) def do_compare(left, right)
@ -68,9 +68,15 @@ defmodule RDF.XSD.Numeric do
cond do cond do
XSD.Decimal.datatype?(left_datatype) or XSD.Decimal.datatype?(right_datatype) -> XSD.Decimal.datatype?(left_datatype) or XSD.Decimal.datatype?(right_datatype) ->
compare_decimal_value(left, right) compare_decimal_value(left, right)
left < right -> :lt
left > right -> :gt left < right ->
true -> :eq :lt
left > right ->
:gt
true ->
:eq
end end
end end
end end
@ -273,8 +279,9 @@ defmodule RDF.XSD.Numeric do
|> datatype.base_primitive().new() |> datatype.base_primitive().new()
value -> value ->
target_datatype = if XSD.Float.datatype?(datatype), target_datatype =
do: XSD.Float, else: datatype.base_primitive() if XSD.Float.datatype?(datatype), do: XSD.Float, else: datatype.base_primitive()
value value
|> Kernel.abs() |> Kernel.abs()
|> target_datatype.new() |> target_datatype.new()
@ -337,7 +344,7 @@ defmodule RDF.XSD.Numeric do
|> XSD.Decimal.new() |> XSD.Decimal.new()
(float_datatype = XSD.Float.datatype?(datatype)) or (float_datatype = XSD.Float.datatype?(datatype)) or
XSD.Double.datatype?(datatype) -> XSD.Double.datatype?(datatype) ->
if literal_value in ~w[nan positive_infinity negative_infinity]a do if literal_value in ~w[nan positive_infinity negative_infinity]a do
literal(value) literal(value)
else else
@ -400,7 +407,7 @@ defmodule RDF.XSD.Numeric do
|> XSD.Decimal.new() |> XSD.Decimal.new()
(float_datatype = XSD.Float.datatype?(datatype)) or (float_datatype = XSD.Float.datatype?(datatype)) or
XSD.Double.datatype?(datatype) -> XSD.Double.datatype?(datatype) ->
if literal_value in ~w[nan positive_infinity negative_infinity]a do if literal_value in ~w[nan positive_infinity negative_infinity]a do
literal(value) literal(value)
else else
@ -457,7 +464,7 @@ defmodule RDF.XSD.Numeric do
|> XSD.Decimal.new() |> XSD.Decimal.new()
(float_datatype = XSD.Float.datatype?(datatype)) or (float_datatype = XSD.Float.datatype?(datatype)) or
XSD.Double.datatype?(datatype) -> XSD.Double.datatype?(datatype) ->
if literal_value in ~w[nan positive_infinity negative_infinity]a do if literal_value in ~w[nan positive_infinity negative_infinity]a do
literal(value) literal(value)
else else
@ -483,11 +490,15 @@ defmodule RDF.XSD.Numeric do
end end
end end
defp arithmetic_operation(op, %Literal{literal: literal1}, literal2, fun), do: arithmetic_operation(op, literal1, literal2, fun) defp arithmetic_operation(op, %Literal{literal: literal1}, literal2, fun),
defp arithmetic_operation(op, literal1, %Literal{literal: literal2}, fun), do: arithmetic_operation(op, literal1, literal2, fun) do: arithmetic_operation(op, literal1, literal2, fun)
defp arithmetic_operation(op, literal1, %Literal{literal: literal2}, fun),
do: arithmetic_operation(op, literal1, literal2, fun)
defp arithmetic_operation(op, %datatype1{} = literal1, %datatype2{} = literal2, fun) do defp arithmetic_operation(op, %datatype1{} = literal1, %datatype2{} = literal2, fun) do
if datatype?(datatype1) and datatype?(datatype2) and if datatype?(datatype1) and datatype?(datatype2) and
Literal.Datatype.valid?(literal1) and Literal.Datatype.valid?(literal2) do Literal.Datatype.valid?(literal1) and Literal.Datatype.valid?(literal2) do
result_type = result_type(op, datatype1, datatype2) result_type = result_type(op, datatype1, datatype2)
{arg1, arg2} = type_conversion(literal1, literal2, result_type) {arg1, arg2} = type_conversion(literal1, literal2, result_type)
result = fun.(arg1.value, arg2.value, result_type) result = fun.(arg1.value, arg2.value, result_type)
@ -538,7 +549,8 @@ defmodule RDF.XSD.Numeric do
defp type_conversion(left, right, _), do: {left, right} defp type_conversion(left, right, _), do: {left, right}
@doc false @doc false
def result_type(op, left, right), do: do_result_type(op, base_primitive(left), base_primitive(right)) def result_type(op, left, right),
do: do_result_type(op, base_primitive(left), base_primitive(right))
defp do_result_type(_, XSD.Double, _), do: XSD.Double defp do_result_type(_, XSD.Double, _), do: XSD.Double
defp do_result_type(_, _, XSD.Double), do: XSD.Double defp do_result_type(_, _, XSD.Double), do: XSD.Double
@ -551,9 +563,10 @@ defmodule RDF.XSD.Numeric do
defp base_primitive(datatype) do defp base_primitive(datatype) do
primitive = datatype.base_primitive() primitive = datatype.base_primitive()
if primitive == XSD.Double and XSD.Float.datatype?(datatype), if primitive == XSD.Double and XSD.Float.datatype?(datatype),
do: XSD.Float, do: XSD.Float,
else: primitive else: primitive
end end
defp literal(value), do: %Literal{literal: value} defp literal(value), do: %Literal{literal: value}
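result_type/3 above picks the datatype of an arithmetic result from the base primitives of both operands, with xsd:double taking precedence in the clauses visible here. Evaluating just those clauses:

# Illustrative only; both results follow directly from the do_result_type/3 clauses shown above.
RDF.XSD.Numeric.result_type(:+, RDF.XSD.Integer, RDF.XSD.Double)  #=> RDF.XSD.Double
RDF.XSD.Numeric.result_type(:+, RDF.XSD.Double, RDF.XSD.Decimal)  #=> RDF.XSD.Double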
View File
@ -11,7 +11,6 @@ defmodule RDF.XSD.String do
alias RDF.XSD alias RDF.XSD
def_applicable_facet XSD.Facets.MinLength def_applicable_facet XSD.Facets.MinLength
def_applicable_facet XSD.Facets.MaxLength def_applicable_facet XSD.Facets.MaxLength
def_applicable_facet XSD.Facets.Length def_applicable_facet XSD.Facets.Length
@ -37,7 +36,6 @@ defmodule RDF.XSD.String do
XSD.Facets.Pattern.conform?(pattern, value) XSD.Facets.Pattern.conform?(pattern, value)
end end
@impl XSD.Datatype @impl XSD.Datatype
@spec lexical_mapping(String.t(), Keyword.t()) :: valid_value @spec lexical_mapping(String.t(), Keyword.t()) :: valid_value
def lexical_mapping(lexical, _), do: to_string(lexical) def lexical_mapping(lexical, _), do: to_string(lexical)
View File
@ -20,7 +20,6 @@ defmodule RDF.XSD.Time do
@grammar ~r/\A(\d{2}:\d{2}:\d{2}(?:\.\d+)?)((?:[\+\-]\d{2}:\d{2})|UTC|GMT|Z)?\Z/ @grammar ~r/\A(\d{2}:\d{2}:\d{2}(?:\.\d+)?)((?:[\+\-]\d{2}:\d{2})|UTC|GMT|Z)?\Z/
@tz_number_grammar ~r/\A(?:([\+\-])(\d{2}):(\d{2}))\Z/ @tz_number_grammar ~r/\A(?:([\+\-])(\d{2}):(\d{2}))\Z/
def_applicable_facet XSD.Facets.ExplicitTimezone def_applicable_facet XSD.Facets.ExplicitTimezone
def_applicable_facet XSD.Facets.Pattern def_applicable_facet XSD.Facets.Pattern
@ -36,7 +35,6 @@ defmodule RDF.XSD.Time do
XSD.Facets.Pattern.conform?(pattern, lexical) XSD.Facets.Pattern.conform?(pattern, lexical)
end end
@impl XSD.Datatype @impl XSD.Datatype
def lexical_mapping(lexical, opts) do def lexical_mapping(lexical, opts) do
case Regex.run(@grammar, lexical) do case Regex.run(@grammar, lexical) do
@ -196,8 +194,15 @@ defmodule RDF.XSD.Time do
@impl RDF.Literal.Datatype @impl RDF.Literal.Datatype
def do_equal_value_same_or_derived_datatypes?(left, right) def do_equal_value_same_or_derived_datatypes?(left, right)
def do_equal_value_same_or_derived_datatypes?(%{value: %{}}, %{value: tz_tuple}) when is_tuple(tz_tuple), do: nil
def do_equal_value_same_or_derived_datatypes?(%{value: tz_tuple}, %{value: %{}}) when is_tuple(tz_tuple), do: nil def do_equal_value_same_or_derived_datatypes?(%{value: %{}}, %{value: tz_tuple})
when is_tuple(tz_tuple),
do: nil
def do_equal_value_same_or_derived_datatypes?(%{value: tz_tuple}, %{value: %{}})
when is_tuple(tz_tuple),
do: nil
def do_equal_value_same_or_derived_datatypes?(left, right), do: super(left, right) def do_equal_value_same_or_derived_datatypes?(left, right), do: super(left, right)
@doc """ @doc """
@ -216,7 +221,8 @@ defmodule RDF.XSD.Time do
""" """
@spec canonical_lexical_with_zone(RDF.Literal.t() | t()) :: String.t() | nil @spec canonical_lexical_with_zone(RDF.Literal.t() | t()) :: String.t() | nil
def canonical_lexical_with_zone(%RDF.Literal{literal: xsd_time}), def canonical_lexical_with_zone(%RDF.Literal{literal: xsd_time}),
do: canonical_lexical_with_zone(xsd_time) do: canonical_lexical_with_zone(xsd_time)
def canonical_lexical_with_zone(%__MODULE__{} = xsd_time) do def canonical_lexical_with_zone(%__MODULE__{} = xsd_time) do
case tz(xsd_time) do case tz(xsd_time) do
nil -> nil ->
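canonical_lexical_with_zone/1 combines the canonical lexical form of a time with its stored timezone, as the spec above documents. A sketch of the expected behaviour; the exact output string is an assumption:

# Assumed result for a time literal carrying a Z timezone:
RDF.XSD.Time.new("01:02:03Z") |> RDF.XSD.Time.canonical_lexical_with_zone()  #=> "01:02:03Z" (assumed)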
View File
@ -1,13 +1,13 @@
defmodule RDF.XSD.Facets.Pattern do defmodule RDF.XSD.Facets.Pattern do
use RDF.XSD.Facet, name: :pattern, type: String.t | [String.t] use RDF.XSD.Facet, name: :pattern, type: String.t() | [String.t()]
@doc !""" @doc !"""
A generic implementation for the `pattern_conform?/3` on the datatypes. A generic implementation for the `pattern_conform?/3` on the datatypes.
""" """
def conform?(pattern, lexical) def conform?(pattern, lexical)
def conform?(patterns, lexical) when is_list(patterns) do def conform?(patterns, lexical) when is_list(patterns) do
Enum.any?(patterns, &(conform?(&1, lexical))) Enum.any?(patterns, &conform?(&1, lexical))
end end
def conform?(pattern, lexical) do def conform?(pattern, lexical) do
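The list clause above lets a pattern facet with several patterns succeed as soon as any one of them matches; the single-pattern clause doing the actual match is truncated below it. A small sketch of that behaviour, assuming the usual anchored matching of XSD pattern facets:

# Assumed to return true, because the second pattern matches the whole lexical:
RDF.XSD.Facets.Pattern.conform?(["[0-9]{5}", "[0-9]{5}-[0-9]{4}"], "20521-9000")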
View File
@ -1,11 +1,11 @@
defmodule RDF.XSD.Utils.Regex do defmodule RDF.XSD.Utils.Regex do
@moduledoc !""" @moduledoc !"""
XSD-flavoured regex matching. XSD-flavoured regex matching.
This is not intended to be used directly. This is not intended to be used directly.
Use `c:RDF.XSD.Datatype.matches?/3` implementations on the datatypes or Use `c:RDF.XSD.Datatype.matches?/3` implementations on the datatypes or
`RDF.Literal.matches?/3` instead. `RDF.Literal.matches?/3` instead.
""" """
@doc """ @doc """
Matches the string representation of the given value against an XPath and XQuery regular expression pattern. Matches the string representation of the given value against an XPath and XQuery regular expression pattern.
mix.exs
View File
@ -3,18 +3,18 @@ defmodule RDF.Mixfile do
@repo_url "https://github.com/rdf-elixir/rdf-ex" @repo_url "https://github.com/rdf-elixir/rdf-ex"
@version File.read!("VERSION") |> String.trim @version File.read!("VERSION") |> String.trim()
def project do def project do
[ [
app: :rdf, app: :rdf,
version: @version, version: @version,
elixir: "~> 1.8", elixir: "~> 1.8",
build_embedded: Mix.env == :prod, build_embedded: Mix.env() == :prod,
start_permanent: Mix.env == :prod, start_permanent: Mix.env() == :prod,
deps: deps(), deps: deps(),
elixirc_paths: elixirc_paths(Mix.env()), elixirc_paths: elixirc_paths(Mix.env()),
compilers: Mix.compilers ++ [:protocol_ex], compilers: Mix.compilers() ++ [:protocol_ex],
# Dialyzer # Dialyzer
dialyzer: dialyzer(), dialyzer: dialyzer(),
@ -29,7 +29,7 @@ defmodule RDF.Mixfile do
main: "RDF", main: "RDF",
source_url: @repo_url, source_url: @repo_url,
source_ref: "v#{@version}", source_ref: "v#{@version}",
extras: ["CHANGELOG.md"], extras: ["CHANGELOG.md"]
], ],
# ExCoveralls # ExCoveralls
@ -39,7 +39,7 @@ defmodule RDF.Mixfile do
"coveralls.detail": :test, "coveralls.detail": :test,
"coveralls.post": :test, "coveralls.post": :test,
"coveralls.html": :test "coveralls.html": :test
], ]
] ]
end end
@ -56,7 +56,7 @@ defmodule RDF.Mixfile do
links: %{ links: %{
"Homepage" => "https://rdf-elixir.dev", "Homepage" => "https://rdf-elixir.dev",
"GitHub" => @repo_url, "GitHub" => @repo_url,
"Changelog" => @repo_url <> "/blob/master/CHANGELOG.md", "Changelog" => @repo_url <> "/blob/master/CHANGELOG.md"
}, },
files: ~w[lib src/*.xrl src/*.yrl priv mix.exs .formatter.exs VERSION *.md] files: ~w[lib src/*.xrl src/*.yrl priv mix.exs .formatter.exs VERSION *.md]
] ]
@ -70,13 +70,11 @@ defmodule RDF.Mixfile do
[ [
{:decimal, "~> 1.5"}, {:decimal, "~> 1.5"},
{:protocol_ex, "~> 0.4"}, {:protocol_ex, "~> 0.4"},
{:credo, "~> 1.4", only: [:dev, :test], runtime: false},
{:credo, "~> 1.4", only: [:dev, :test], runtime: false}, {:dialyxir, "~> 1.0", only: :dev, runtime: false},
{:dialyxir, "~> 1.0", only: :dev, runtime: false}, {:ex_doc, "~> 0.22", only: :dev, runtime: false},
{:ex_doc, "~> 0.22", only: :dev, runtime: false}, {:excoveralls, "~> 0.13", only: :test},
{:excoveralls, "~> 0.13", only: :test}, {:benchee, "~> 1.0", only: :bench}
{:benchee, "~> 1.0", only: :bench},
] ]
end end
@ -89,5 +87,5 @@ defmodule RDF.Mixfile do
end end
defp elixirc_paths(:test), do: ["lib", "test/support"] defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"] defp elixirc_paths(_), do: ["lib"]
end end
View File
@ -7,15 +7,14 @@ defmodule RDF.NQuads.W3C.TestSuite do
use ExUnit.Case, async: false use ExUnit.Case, async: false
@w3c_nquads_test_suite Path.join(RDF.TestData.dir, "N-QUADS-TESTS") @w3c_nquads_test_suite Path.join(RDF.TestData.dir(), "N-QUADS-TESTS")
ExUnit.Case.register_attribute(__ENV__, :nq_test)
ExUnit.Case.register_attribute __ENV__, :nq_test
@w3c_nquads_test_suite @w3c_nquads_test_suite
|> File.ls! |> File.ls!()
|> Enum.filter(fn (file) -> Path.extname(file) == ".nq" end) |> Enum.filter(fn file -> Path.extname(file) == ".nq" end)
|> Enum.each(fn (file) -> |> Enum.each(fn file ->
@nq_test file: Path.join(@w3c_nquads_test_suite, file) @nq_test file: Path.join(@w3c_nquads_test_suite, file)
if file |> String.contains?("-bad-") do if file |> String.contains?("-bad-") do
test "Negative syntax test: #{file}", context do test "Negative syntax test: #{file}", context do
@ -27,5 +26,4 @@ defmodule RDF.NQuads.W3C.TestSuite do
end end
end end
end) end)
end end
View File
@ -7,14 +7,14 @@ defmodule RDF.NTriples.W3C.TestSuite do
use ExUnit.Case, async: false use ExUnit.Case, async: false
@w3c_ntriples_test_suite Path.join(RDF.TestData.dir, "N-TRIPLES-TESTS") @w3c_ntriples_test_suite Path.join(RDF.TestData.dir(), "N-TRIPLES-TESTS")
ExUnit.Case.register_attribute __ENV__, :nt_test ExUnit.Case.register_attribute(__ENV__, :nt_test)
@w3c_ntriples_test_suite @w3c_ntriples_test_suite
|> File.ls! |> File.ls!()
|> Enum.filter(fn (file) -> Path.extname(file) == ".nt" end) |> Enum.filter(fn file -> Path.extname(file) == ".nt" end)
|> Enum.each(fn (file) -> |> Enum.each(fn file ->
@nt_test file: Path.join(@w3c_ntriples_test_suite, file) @nt_test file: Path.join(@w3c_ntriples_test_suite, file)
if file |> String.contains?("-bad-") do if file |> String.contains?("-bad-") do
test "Negative syntax test: #{file}", context do test "Negative syntax test: #{file}", context do
@ -26,5 +26,4 @@ defmodule RDF.NTriples.W3C.TestSuite do
end end
end end
end) end)
end end
View File
@ -8,7 +8,7 @@ defmodule RDF.Turtle.W3C.Test do
""" """
use ExUnit.Case, async: false use ExUnit.Case, async: false
ExUnit.Case.register_attribute __ENV__, :test_case ExUnit.Case.register_attribute(__ENV__, :test_case)
alias RDF.{Turtle, TestSuite, NTriples} alias RDF.{Turtle, TestSuite, NTriples}
alias TestSuite.NS.RDFT alias TestSuite.NS.RDFT
@ -17,8 +17,8 @@ defmodule RDF.Turtle.W3C.Test do
TestSuite.test_cases("Turtle", RDFT.TestTurtleEval, base: @base) TestSuite.test_cases("Turtle", RDFT.TestTurtleEval, base: @base)
|> Enum.each(fn test_case -> |> Enum.each(fn test_case ->
@tag test_case: test_case @tag test_case: test_case
if TestSuite.test_name(test_case) in ~w[ if TestSuite.test_name(test_case) in ~w[
anonymous_blank_node_subject anonymous_blank_node_subject
anonymous_blank_node_object anonymous_blank_node_object
labeled_blank_node_subject labeled_blank_node_subject
@ -45,58 +45,61 @@ defmodule RDF.Turtle.W3C.Test do
turtle-subm-10 turtle-subm-10
turtle-subm-14 turtle-subm-14
] do ] do
@tag skip: """ @tag skip: """
The produced graphs are correct, but have different blank node labels than the result graph. The produced graphs are correct, but have different blank node labels than the result graph.
TODO: Implement a graph isomorphism algorithm. TODO: Implement a graph isomorphism algorithm.
""" """
end end
test TestSuite.test_title(test_case), %{test_case: test_case} do test TestSuite.test_title(test_case), %{test_case: test_case} do
with base = to_string(TestSuite.test_input_file(test_case)) do with base = to_string(TestSuite.test_input_file(test_case)) do
assert RDF.Graph.equal?( assert RDF.Graph.equal?(
(TestSuite.test_input_file_path(test_case, "Turtle") TestSuite.test_input_file_path(test_case, "Turtle")
|> Turtle.read_file!(base: base)), |> Turtle.read_file!(base: base),
(TestSuite.test_result_file_path(test_case, "Turtle") TestSuite.test_result_file_path(test_case, "Turtle")
|> NTriples.read_file!) |> NTriples.read_file!()
) )
end end
end end
end) end)
TestSuite.test_cases("Turtle", RDFT.TestTurtlePositiveSyntax, base: @base) TestSuite.test_cases("Turtle", RDFT.TestTurtlePositiveSyntax, base: @base)
|> Enum.each(fn test_case -> |> Enum.each(fn test_case ->
@tag test_case: test_case @tag test_case: test_case
test TestSuite.test_title(test_case), %{test_case: test_case} do test TestSuite.test_title(test_case), %{test_case: test_case} do
with base = to_string(TestSuite.test_input_file(test_case)) do with base = to_string(TestSuite.test_input_file(test_case)) do
assert {:ok, _} = assert {:ok, _} =
TestSuite.test_input_file_path(test_case, "Turtle") |> Turtle.read_file(base: base) TestSuite.test_input_file_path(test_case, "Turtle")
end |> Turtle.read_file(base: base)
end end
end) end
end)
TestSuite.test_cases("Turtle", RDFT.TestTurtleNegativeSyntax, base: @base) TestSuite.test_cases("Turtle", RDFT.TestTurtleNegativeSyntax, base: @base)
|> Enum.each(fn test_case -> |> Enum.each(fn test_case ->
@tag test_case: test_case @tag test_case: test_case
test TestSuite.test_title(test_case), %{test_case: test_case} do test TestSuite.test_title(test_case), %{test_case: test_case} do
with base = to_string(TestSuite.test_input_file(test_case)) do with base = to_string(TestSuite.test_input_file(test_case)) do
assert {:error, _} = assert {:error, _} =
TestSuite.test_input_file_path(test_case, "Turtle") |> Turtle.read_file(base: base) TestSuite.test_input_file_path(test_case, "Turtle")
end |> Turtle.read_file(base: base)
end end
end) end
end)
TestSuite.test_cases("Turtle", RDFT.TestTurtleNegativeEval, base: @base) TestSuite.test_cases("Turtle", RDFT.TestTurtleNegativeEval, base: @base)
|> Enum.each(fn test_case -> |> Enum.each(fn test_case ->
if TestSuite.test_name(test_case) in ~w[turtle-eval-bad-01 turtle-eval-bad-02 turtle-eval-bad-03] do if TestSuite.test_name(test_case) in ~w[turtle-eval-bad-01 turtle-eval-bad-02 turtle-eval-bad-03] do
@tag skip: "TODO: IRI validation" @tag skip: "TODO: IRI validation"
end end
@tag test_case: test_case
test TestSuite.test_title(test_case), %{test_case: test_case} do
with base = to_string(TestSuite.test_input_file(test_case)) do
assert {:error, _} =
TestSuite.test_input_file_path(test_case, "Turtle") |> Turtle.read_file(base: base)
end
end
end)
@tag test_case: test_case
test TestSuite.test_title(test_case), %{test_case: test_case} do
with base = to_string(TestSuite.test_input_file(test_case)) do
assert {:error, _} =
TestSuite.test_input_file_path(test_case, "Turtle")
|> Turtle.read_file(base: base)
end
end
end)
end end
View File
@ -2,13 +2,9 @@ defmodule RDF.Test.Case do
use ExUnit.CaseTemplate use ExUnit.CaseTemplate
use RDF.Vocabulary.Namespace use RDF.Vocabulary.Namespace
defvocab EX, defvocab EX, base_iri: "http://example.com/", terms: [], strict: false
base_iri: "http://example.com/",
terms: [], strict: false
defvocab FOAF, defvocab FOAF, base_iri: "http://xmlns.com/foaf/0.1/", terms: [], strict: false
base_iri: "http://xmlns.com/foaf/0.1/",
terms: [], strict: false
alias RDF.{Dataset, Graph, Description, IRI} alias RDF.{Dataset, Graph, Description, IRI}
import RDF, only: [iri: 1] import RDF, only: [iri: 1]
@ -31,11 +27,12 @@ defmodule RDF.Test.Case do
############################### ###############################
# RDF.Description # RDF.Description
def description, do: Description.new(EX.Subject) def description, do: Description.new(EX.Subject)
def description(content), do: Description.add(description(), content) def description(content), do: Description.add(description(), content)
def description_of_subject(%Description{subject: subject}, subject), def description_of_subject(%Description{subject: subject}, subject),
do: true do: true
def description_of_subject(_, _), def description_of_subject(_, _),
do: false do: false
@ -53,17 +50,17 @@ defmodule RDF.Test.Case do
def graph, do: unnamed_graph() def graph, do: unnamed_graph()
def unnamed_graph, do: Graph.new def unnamed_graph, do: Graph.new()
def named_graph(name \\ EX.GraphName), do: Graph.new(name: name) def named_graph(name \\ EX.GraphName), do: Graph.new(name: name)
def unnamed_graph?(%Graph{name: nil}), do: true def unnamed_graph?(%Graph{name: nil}), do: true
def unnamed_graph?(_), do: false def unnamed_graph?(_), do: false
def named_graph?(%Graph{name: %IRI{}}), do: true def named_graph?(%Graph{name: %IRI{}}), do: true
def named_graph?(_), do: false def named_graph?(_), do: false
def named_graph?(%Graph{name: name}, name), do: true def named_graph?(%Graph{name: name}, name), do: true
def named_graph?(_, _), do: false def named_graph?(_, _), do: false
def empty_graph?(%Graph{descriptions: descriptions}), def empty_graph?(%Graph{descriptions: descriptions}),
do: descriptions == %{} do: descriptions == %{}
@ -74,40 +71,40 @@ defmodule RDF.Test.Case do
|> Enum.member?(statement) |> Enum.member?(statement)
end end
############################### ###############################
# RDF.Dataset # RDF.Dataset
def dataset, do: unnamed_dataset() def dataset, do: unnamed_dataset()
def unnamed_dataset, do: Dataset.new def unnamed_dataset, do: Dataset.new()
def named_dataset(name \\ EX.DatasetName), do: Dataset.new(name: name) def named_dataset(name \\ EX.DatasetName), do: Dataset.new(name: name)
def unnamed_dataset?(%Dataset{name: nil}), do: true def unnamed_dataset?(%Dataset{name: nil}), do: true
def unnamed_dataset?(_), do: false def unnamed_dataset?(_), do: false
def named_dataset?(%Dataset{name: %IRI{}}), do: true def named_dataset?(%Dataset{name: %IRI{}}), do: true
def named_dataset?(_), do: false def named_dataset?(_), do: false
def named_dataset?(%Dataset{name: name}, name), do: true def named_dataset?(%Dataset{name: name}, name), do: true
def named_dataset?(_, _), do: false def named_dataset?(_, _), do: false
def empty_dataset?(%Dataset{graphs: graphs}), do: graphs == %{} def empty_dataset?(%Dataset{graphs: graphs}), do: graphs == %{}
def dataset_includes_statement?(dataset, {_, _, _} = statement) do def dataset_includes_statement?(dataset, {_, _, _} = statement) do
dataset dataset
|> Dataset.default_graph |> Dataset.default_graph()
|> graph_includes_statement?(statement) |> graph_includes_statement?(statement)
end end
def dataset_includes_statement?(dataset, {subject, predicate, objects, nil}), def dataset_includes_statement?(dataset, {subject, predicate, objects, nil}),
do: dataset_includes_statement?(dataset, {subject, predicate, objects}) do: dataset_includes_statement?(dataset, {subject, predicate, objects})
def dataset_includes_statement?(dataset, def dataset_includes_statement?(
{subject, predicate, objects, graph_context}) do dataset,
{subject, predicate, objects, graph_context}
) do
dataset.graphs dataset.graphs
|> Map.get(iri(graph_context), named_graph(graph_context)) |> Map.get(iri(graph_context), named_graph(graph_context))
|> graph_includes_statement?({subject, predicate, objects}) |> graph_includes_statement?({subject, predicate, objects})
end end
end end
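The quad clause of dataset_includes_statement?/2 above looks a statement up in its named graph, while plain triples are checked against the default graph. A sketch of how that plays out inside a test using this case template; EX is the non-strict vocabulary defined at the top of the module:

# Illustrative only:
dataset = Dataset.new() |> Dataset.add({EX.S, EX.p(), EX.O, EX.Graph})
dataset_includes_statement?(dataset, {EX.S, EX.p(), EX.O, EX.Graph})  #=> true, found in the named graph
dataset_includes_statement?(dataset, {EX.S, EX.p(), EX.O})            #=> false, the default graph is empty here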
View File
@ -8,18 +8,18 @@ defmodule RDF.Query.Test.Case do
alias RDF.Query.BGP alias RDF.Query.BGP
import unquote(__MODULE__) import unquote(__MODULE__)
end end
end end
alias RDF.Query.BGP alias RDF.Query.BGP
def bgp_struct(), do: %BGP{triple_patterns: []} def bgp_struct(), do: %BGP{triple_patterns: []}
def bgp_struct(triple_patterns) when is_list(triple_patterns), def bgp_struct(triple_patterns) when is_list(triple_patterns),
do: %BGP{triple_patterns: triple_patterns} do: %BGP{triple_patterns: triple_patterns}
def bgp_struct({_, _, _} = triple_pattern), def bgp_struct({_, _, _} = triple_pattern),
do: %BGP{triple_patterns: [triple_pattern]} do: %BGP{triple_patterns: [triple_pattern]}
def ok_bgp_struct(triple_patterns), do: {:ok, bgp_struct(triple_patterns)} def ok_bgp_struct(triple_patterns), do: {:ok, bgp_struct(triple_patterns)}
end end
View File
@ -1,14 +1,12 @@
defmodule RDF.TestData do defmodule RDF.TestData do
@dir Path.join(File.cwd!(), "test/data/")
@dir Path.join(File.cwd!, "test/data/")
def dir, do: @dir def dir, do: @dir
def file(name) do def file(name) do
if (path = Path.join(@dir, name)) |> File.exists? do if (path = Path.join(@dir, name)) |> File.exists?() do
path path
else else
raise "Test data file '#{name}' not found" raise "Test data file '#{name}' not found"
end end
end end
end end
View File
@ -1,39 +1,39 @@
defmodule RDF.TestDatatypes do defmodule RDF.TestDatatypes do
defmodule Initials do defmodule Initials do
use RDF.XSD.Datatype.Restriction, use RDF.XSD.Datatype.Restriction,
name: "initials", name: "initials",
id: "http://example.com/initials", id: "http://example.com/initials",
base: RDF.XSD.String base: RDF.XSD.String
def_facet_constraint RDF.XSD.Facets.Length, 2 def_facet_constraint RDF.XSD.Facets.Length, 2
end end
defmodule UsZipcode do defmodule UsZipcode do
use RDF.XSD.Datatype.Restriction, use RDF.XSD.Datatype.Restriction,
name: "us_zipcode", name: "us_zipcode",
id: "http://example.com/us-zipcode", id: "http://example.com/us-zipcode",
base: RDF.XSD.String base: RDF.XSD.String
def_facet_constraint RDF.XSD.Facets.Pattern, "[0-9]{5}(-[0-9]{4})?" def_facet_constraint RDF.XSD.Facets.Pattern, "[0-9]{5}(-[0-9]{4})?"
end end
defmodule AltUsZipcode do defmodule AltUsZipcode do
use RDF.XSD.Datatype.Restriction, use RDF.XSD.Datatype.Restriction,
name: "alt_us_zipcode", name: "alt_us_zipcode",
id: "http://example.com/alt-us-zipcode", id: "http://example.com/alt-us-zipcode",
base: RDF.XSD.String base: RDF.XSD.String
def_facet_constraint RDF.XSD.Facets.Pattern, [ def_facet_constraint RDF.XSD.Facets.Pattern, [
"[0-9]{5}", "[0-9]{5}",
"[0-9]{5}-[0-9]{4}", "[0-9]{5}-[0-9]{4}"
] ]
end end
defmodule Age do defmodule Age do
use RDF.XSD.Datatype.Restriction, use RDF.XSD.Datatype.Restriction,
name: "age", name: "age",
id: "http://example.com/Age", id: "http://example.com/Age",
base: RDF.XSD.PositiveInteger base: RDF.XSD.PositiveInteger
def_facet_constraint RDF.XSD.Facets.MaxInclusive, 150 def_facet_constraint RDF.XSD.Facets.MaxInclusive, 150
@ -43,9 +43,9 @@ defmodule RDF.TestDatatypes do
defmodule DecimalUnitInterval do defmodule DecimalUnitInterval do
use RDF.XSD.Datatype.Restriction, use RDF.XSD.Datatype.Restriction,
name: "decimal_unit_interval", name: "decimal_unit_interval",
id: "http://example.com/decimalUnitInterval", id: "http://example.com/decimalUnitInterval",
base: RDF.XSD.Decimal base: RDF.XSD.Decimal
def_facet_constraint RDF.XSD.Facets.MinInclusive, 0 def_facet_constraint RDF.XSD.Facets.MinInclusive, 0
def_facet_constraint RDF.XSD.Facets.MaxInclusive, 1 def_facet_constraint RDF.XSD.Facets.MaxInclusive, 1
@ -53,9 +53,9 @@ defmodule RDF.TestDatatypes do
defmodule DoubleUnitInterval do defmodule DoubleUnitInterval do
use RDF.XSD.Datatype.Restriction, use RDF.XSD.Datatype.Restriction,
name: "double_unit_interval", name: "double_unit_interval",
id: "http://example.com/doubleUnitInterval", id: "http://example.com/doubleUnitInterval",
base: RDF.XSD.Double base: RDF.XSD.Double
def_facet_constraint RDF.XSD.Facets.MinInclusive, 0 def_facet_constraint RDF.XSD.Facets.MinInclusive, 0
def_facet_constraint RDF.XSD.Facets.MaxInclusive, 1 def_facet_constraint RDF.XSD.Facets.MaxInclusive, 1
@ -63,9 +63,9 @@ defmodule RDF.TestDatatypes do
defmodule FloatUnitInterval do defmodule FloatUnitInterval do
use RDF.XSD.Datatype.Restriction, use RDF.XSD.Datatype.Restriction,
name: "float_unit_interval", name: "float_unit_interval",
id: "http://example.com/floatUnitInterval", id: "http://example.com/floatUnitInterval",
base: RDF.XSD.Float base: RDF.XSD.Float
def_facet_constraint RDF.XSD.Facets.MinInclusive, 0 def_facet_constraint RDF.XSD.Facets.MinInclusive, 0
def_facet_constraint RDF.XSD.Facets.MaxInclusive, 1 def_facet_constraint RDF.XSD.Facets.MaxInclusive, 1
@ -73,27 +73,27 @@ defmodule RDF.TestDatatypes do
defmodule DateTimeWithTz do defmodule DateTimeWithTz do
use RDF.XSD.Datatype.Restriction, use RDF.XSD.Datatype.Restriction,
name: "datetime_with_tz", name: "datetime_with_tz",
id: "http://example.com/datetime-with-tz", id: "http://example.com/datetime-with-tz",
base: RDF.XSD.DateTime base: RDF.XSD.DateTime
def_facet_constraint RDF.XSD.Facets.ExplicitTimezone, :required def_facet_constraint RDF.XSD.Facets.ExplicitTimezone, :required
end end
defmodule DateWithoutTz do defmodule DateWithoutTz do
use RDF.XSD.Datatype.Restriction, use RDF.XSD.Datatype.Restriction,
name: "date_with_tz", name: "date_with_tz",
id: "http://example.com/date-with-tz", id: "http://example.com/date-with-tz",
base: RDF.XSD.Date base: RDF.XSD.Date
def_facet_constraint RDF.XSD.Facets.ExplicitTimezone, :prohibited def_facet_constraint RDF.XSD.Facets.ExplicitTimezone, :prohibited
end end
defmodule CustomTime do defmodule CustomTime do
use RDF.XSD.Datatype.Restriction, use RDF.XSD.Datatype.Restriction,
name: "time_with_tz", name: "time_with_tz",
id: "http://example.com/time-with-tz", id: "http://example.com/time-with-tz",
base: RDF.XSD.Time base: RDF.XSD.Time
def_facet_constraint RDF.XSD.Facets.ExplicitTimezone, :optional def_facet_constraint RDF.XSD.Facets.ExplicitTimezone, :optional
end end
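These restriction datatypes exist to exercise the facet constraints in the test suite. A short sketch of what they enforce, using the datatypes' valid?/1 from the XSD datatype behaviour:

alias RDF.TestDatatypes.{UsZipcode, Age}

UsZipcode.new("20521-9000") |> UsZipcode.valid?()  #=> true, matches the pattern facet
UsZipcode.new("2052") |> UsZipcode.valid?()        #=> false, too short for the pattern
Age.new(200) |> Age.valid?()                       #=> false, above the MaxInclusive facet of 150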
View File
@ -1,49 +1,53 @@
defmodule RDF.TestLiterals do defmodule RDF.TestLiterals do
alias RDF.Literal alias RDF.Literal
alias RDF.NS.XSD alias RDF.NS.XSD
def value(:empty), do: [""] def value(:empty), do: [""]
def value(:plain), do: ["Hello"] def value(:plain), do: ["Hello"]
def value(:empty_lang), do: ["", [language: "en"]] def value(:empty_lang), do: ["", [language: "en"]]
def value(:plain_lang), do: ["Hello", [language: "en"]] def value(:plain_lang), do: ["Hello", [language: "en"]]
def value(:typed_string), do: ["String", [datatype: XSD.string]] def value(:typed_string), do: ["String", [datatype: XSD.string()]]
def value(:uri), do: [URI.parse("http://example.com")] def value(:uri), do: [URI.parse("http://example.com")]
def value(:true), do: [true] def value(true), do: [true]
def value(:false), do: [false] def value(false), do: [false]
def value(:int), do: [123] def value(:int), do: [123]
def value(:neg_int), do: [-123] def value(:neg_int), do: [-123]
def value(:decimal), do: [Decimal.from_float(3.14)] def value(:decimal), do: [Decimal.from_float(3.14)]
def value(:long), do: [9223372036854775807] def value(:long), do: [9_223_372_036_854_775_807]
def value(:double), do: [3.1415] def value(:double), do: [3.1415]
def value(:date), do: [~D[2017-04-13]] def value(:date), do: [~D[2017-04-13]]
def value(:naive_datetime), do: [~N[2017-04-14 15:32:07]] def value(:naive_datetime), do: [~N[2017-04-14 15:32:07]]
def value(:datetime), do: ["2017-04-14 15:32:07Z" |> DateTime.from_iso8601 |> elem(1)] def value(:datetime), do: ["2017-04-14 15:32:07Z" |> DateTime.from_iso8601() |> elem(1)]
def value(:time), do: [~T[01:02:03]] def value(:time), do: [~T[01:02:03]]
def value(selector) do def value(selector) do
raise "unexpected literal: :#{selector}" raise "unexpected literal: :#{selector}"
end end
def values(:all_simple), def values(:all_simple),
do: Enum.map(~W(empty plain typed_string)a, &value/1) do: Enum.map(~W(empty plain typed_string)a, &value/1)
def values(:all_plain_lang), def values(:all_plain_lang),
do: Enum.map(~W[empty_lang plain_lang]a, &value/1) do: Enum.map(~W[empty_lang plain_lang]a, &value/1)
def values(:all_native), def values(:all_native),
do: Enum.map(~W[false true int long double time date datetime naive_datetime]a, &value/1) do: Enum.map(~W[false true int long double time date datetime naive_datetime]a, &value/1)
def values(:all_plain), def values(:all_plain),
do: values(~W[all_simple all_plain_lang]a) do: values(~W[all_simple all_plain_lang]a)
def values(:all), def values(:all),
do: values(~W[all_native all_plain]a) do: values(~W[all_native all_plain]a)
def values(selectors) when is_list(selectors) do def values(selectors) when is_list(selectors) do
Enum.reduce selectors, [], fn(selector, values) -> Enum.reduce(selectors, [], fn selector, values ->
values ++ values(selector) values ++ values(selector)
end end)
end end
def literal(selector), def literal(selector),
do: apply(Literal, :new, value(selector)) do: apply(Literal, :new, value(selector))
def literals(selectors), def literals(selectors),
do: Enum.map values(selectors), fn value -> apply(Literal, :new, value) end do: Enum.map(values(selectors), fn value -> apply(Literal, :new, value) end)
end end
View File
@ -1,11 +1,11 @@
defmodule RDF.TestSuite do defmodule RDF.TestSuite do
defmodule NS do defmodule NS do
use RDF.Vocabulary.Namespace use RDF.Vocabulary.Namespace
defvocab MF, defvocab MF,
base_iri: "http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#", base_iri: "http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#",
terms: [], strict: false terms: [],
strict: false
defvocab RDFT, defvocab RDFT,
base_iri: "http://www.w3.org/ns/rdftest#", base_iri: "http://www.w3.org/ns/rdftest#",
@ -25,10 +25,9 @@ defmodule RDF.TestSuite do
alias RDF.{Turtle, Graph, Description, IRI} alias RDF.{Turtle, Graph, Description, IRI}
def dir(format), do: Path.join(RDF.TestData.dir(), String.upcase(format) <> "-TESTS")
def dir(format), do: Path.join(RDF.TestData.dir, String.upcase(format) <> "-TESTS")
def file(filename, format), do: format |> dir |> Path.join(filename) def file(filename, format), do: format |> dir |> Path.join(filename)
def manifest_path(format), do: file("manifest.ttl", format) def manifest_path(format), do: file("manifest.ttl", format)
def manifest_graph(format, opts \\ []) do def manifest_graph(format, opts \\ []) do
format format
@ -39,34 +38,32 @@ defmodule RDF.TestSuite do
def test_cases(format, test_type, opts) do def test_cases(format, test_type, opts) do
format format
|> manifest_graph(opts) |> manifest_graph(opts)
|> Graph.descriptions |> Graph.descriptions()
|> Enum.filter(fn description -> |> Enum.filter(fn description ->
RDF.iri(test_type) in Description.get(description, RDF.type, []) RDF.iri(test_type) in Description.get(description, RDF.type(), [])
end) end)
end end
def test_name(test_case), do: value(test_case, MF.name) def test_name(test_case), do: value(test_case, MF.name())
def test_title(test_case), def test_title(test_case),
# Unfortunately OTP < 20 doesn't support unicode characters in atoms, # Unfortunately OTP < 20 doesn't support unicode characters in atoms,
# so we can't put the description in the test name # so we can't put the description in the test name
# do: test_name(test_case) <> ": " <> value(test_case, RDFS.comment) # do: test_name(test_case) <> ": " <> value(test_case, RDFS.comment)
do: test_name(test_case) do: test_name(test_case)
def test_input_file(test_case), def test_input_file(test_case),
do: test_case |> Description.first(MF.action) |> IRI.parse do: test_case |> Description.first(MF.action()) |> IRI.parse()
def test_output_file(test_case), def test_output_file(test_case),
do: test_case |> Description.first(MF.result) |> IRI.parse do: test_case |> Description.first(MF.result()) |> IRI.parse()
def test_input_file_path(test_case, format), def test_input_file_path(test_case, format),
do: test_input_file(test_case).path |> Path.basename |> file(format) do: test_input_file(test_case).path |> Path.basename() |> file(format)
def test_result_file_path(test_case, format), def test_result_file_path(test_case, format),
do: test_output_file(test_case).path |> Path.basename |> file(format) do: test_output_file(test_case).path |> Path.basename() |> file(format)
defp value(description, property), defp value(description, property),
do: Description.first(description, property) |> to_string do: Description.first(description, property) |> to_string
end end
View File
@ -7,8 +7,7 @@ defmodule RDF.XSD.Datatype.Test.Case do
datatype = Keyword.fetch!(opts, :datatype) datatype = Keyword.fetch!(opts, :datatype)
datatype_name = Keyword.fetch!(opts, :name) datatype_name = Keyword.fetch!(opts, :name)
datatype_iri = datatype_iri = Keyword.get(opts, :iri, RDF.NS.XSD.__base_iri__() <> datatype_name)
Keyword.get(opts, :iri, RDF.NS.XSD.__base_iri__ <> datatype_name)
valid = Keyword.get(opts, :valid) valid = Keyword.get(opts, :valid)
invalid = Keyword.get(opts, :invalid) invalid = Keyword.get(opts, :invalid)
@ -116,45 +115,48 @@ defmodule RDF.XSD.Datatype.Test.Case do
describe "general datatype?/1" do describe "general datatype?/1" do
test "on the exact same datatype" do test "on the exact same datatype" do
assert (unquote(datatype).datatype?(unquote(datatype))) == true assert unquote(datatype).datatype?(unquote(datatype)) == true
Enum.each(@valid, fn {input, _} -> Enum.each(@valid, fn {input, _} ->
literal = unquote(datatype).new(input) literal = unquote(datatype).new(input)
assert (unquote(datatype).datatype?(literal)) == true assert unquote(datatype).datatype?(literal) == true
assert (unquote(datatype).datatype?(literal.literal)) == true assert unquote(datatype).datatype?(literal.literal) == true
end) end)
end end
unless unquote(primitive) do unless unquote(primitive) do
test "on the base datatype" do test "on the base datatype" do
assert (unquote(base).datatype?(unquote(datatype))) == true assert unquote(base).datatype?(unquote(datatype)) == true
Enum.each(@valid, fn {input, _} -> Enum.each(@valid, fn {input, _} ->
literal = unquote(datatype).new(input) literal = unquote(datatype).new(input)
assert (unquote(base).datatype?(literal)) == true assert unquote(base).datatype?(literal) == true
assert (unquote(base).datatype?(literal.literal)) == true assert unquote(base).datatype?(literal.literal) == true
end) end)
end end
test "on the base primitive datatype" do test "on the base primitive datatype" do
assert (unquote(base_primitive).datatype?(unquote(datatype))) == true assert unquote(base_primitive).datatype?(unquote(datatype)) == true
Enum.each(@valid, fn {input, _} -> Enum.each(@valid, fn {input, _} ->
literal = unquote(datatype).new(input) literal = unquote(datatype).new(input)
assert (unquote(base_primitive).datatype?(literal)) == true assert unquote(base_primitive).datatype?(literal) == true
assert (unquote(base_primitive).datatype?(literal.literal)) == true assert unquote(base_primitive).datatype?(literal.literal) == true
end) end)
end end
end end
end end
test "datatype_id/1" do test "datatype_id/1" do
Enum.each(@valid, fn {input, _} -> Enum.each(@valid, fn {input, _} ->
assert (unquote(datatype).new(input) |> unquote(datatype).datatype_id()) == RDF.iri(unquote(datatype_iri)) assert unquote(datatype).new(input) |> unquote(datatype).datatype_id() ==
RDF.iri(unquote(datatype_iri))
end) end)
end end
test "language/1" do test "language/1" do
Enum.each(@valid, fn {input, _} -> Enum.each(@valid, fn {input, _} ->
assert (unquote(datatype).new(input) |> unquote(datatype).language()) == nil assert unquote(datatype).new(input) |> unquote(datatype).language() == nil
end) end)
end end
@ -288,9 +290,9 @@ defmodule RDF.XSD.Datatype.Test.Case do
@tag example: %{input: input, canonicalized: canonicalized} @tag example: %{input: input, canonicalized: canonicalized}
test "canonical? for #{unquote(datatype)} #{inspect(input)}", %{example: example} do test "canonical? for #{unquote(datatype)} #{inspect(input)}", %{example: example} do
literal = unquote(datatype).new(example.input) literal = unquote(datatype).new(example.input)
assert unquote(datatype).canonical?(literal) == (
unquote(datatype).lexical(literal) ==example.canonicalized assert unquote(datatype).canonical?(literal) ==
) (unquote(datatype).lexical(literal) == example.canonicalized)
end end
end) end)
View File
@ -5,40 +5,38 @@ defmodule RDF.BlankNode.IncrementTest do
alias RDF.BlankNode.{Generator, Increment} alias RDF.BlankNode.{Generator, Increment}
describe "generate/1" do describe "generate/1" do
test "without prefix" do test "without prefix" do
assert Increment.generate(%{counter: 0, map: %{}}) == assert Increment.generate(%{counter: 0, map: %{}}) ==
{bnode(0), (%{counter: 1, map: %{}})} {bnode(0), %{counter: 1, map: %{}}}
end end
test "with prefix" do test "with prefix" do
assert Increment.generate(%{counter: 0, map: %{}, prefix: "b"}) == assert Increment.generate(%{counter: 0, map: %{}, prefix: "b"}) ==
{bnode("b0"), (%{counter: 1, map: %{}, prefix: "b"})} {bnode("b0"), %{counter: 1, map: %{}, prefix: "b"}}
end end
end end
describe "generate_for/2" do describe "generate_for/2" do
test "when the given string not exists in the map" do test "when the given string not exists in the map" do
assert Increment.generate_for("bar", %{counter: 1, map: %{"foo" => 0}}) == assert Increment.generate_for("bar", %{counter: 1, map: %{"foo" => 0}}) ==
{bnode(1), (%{counter: 2, map: %{"foo" => 0, "bar" => 1}})} {bnode(1), %{counter: 2, map: %{"foo" => 0, "bar" => 1}}}
end end
test "when the given string exists in the map" do test "when the given string exists in the map" do
assert Increment.generate_for("foo", %{counter: 1, map: %{"foo" => 0}}) == assert Increment.generate_for("foo", %{counter: 1, map: %{"foo" => 0}}) ==
{bnode(0), (%{counter: 1, map: %{"foo" => 0}})} {bnode(0), %{counter: 1, map: %{"foo" => 0}}}
end end
test "with prefix" do test "with prefix" do
assert Increment.generate_for("bar", %{counter: 1, map: %{"foo" => 0}, prefix: "b"}) == assert Increment.generate_for("bar", %{counter: 1, map: %{"foo" => 0}, prefix: "b"}) ==
{bnode("b1"), (%{counter: 2, map: %{"foo" => 0, "bar" => 1}, prefix: "b"})} {bnode("b1"), %{counter: 2, map: %{"foo" => 0, "bar" => 1}, prefix: "b"}}
assert Increment.generate_for("foo", %{counter: 1, map: %{"foo" => 0}, prefix: "b"}) == assert Increment.generate_for("foo", %{counter: 1, map: %{"foo" => 0}, prefix: "b"}) ==
{bnode("b0"), (%{counter: 1, map: %{"foo" => 0}, prefix: "b"})} {bnode("b0"), %{counter: 1, map: %{"foo" => 0}, prefix: "b"}}
end end
end end
test "generator without prefix" do test "generator without prefix" do
{:ok, generator} = Generator.start_link(Increment) {:ok, generator} = Generator.start_link(Increment)
@ -77,5 +75,4 @@ defmodule RDF.BlankNode.IncrementTest do
assert Generator.generate_for(generator, {:foo, 42}) == bnode("b2") assert Generator.generate_for(generator, {:foo, 42}) == bnode("b2")
assert Generator.generate(generator) == bnode("b6") assert Generator.generate(generator) == bnode("b6")
end end
end end
View File
@ -7,88 +7,92 @@ defmodule RDF.DataTest do
|> EX.p1(EX.O1, EX.O2) |> EX.p1(EX.O1, EX.O2)
|> EX.p2(EX.O3) |> EX.p2(EX.O3)
|> EX.p3(~B<foo>, ~L"bar") |> EX.p3(~B<foo>, ~L"bar")
graph = graph =
Graph.new Graph.new()
|> Graph.add(description) |> Graph.add(description)
|> Graph.add( |> Graph.add(
EX.S2 EX.S2
|> EX.p2(EX.O3, EX.O4) |> EX.p2(EX.O3, EX.O4)
) )
named_graph = %Graph{graph | name: iri(EX.NamedGraph)} named_graph = %Graph{graph | name: iri(EX.NamedGraph)}
dataset = dataset =
Dataset.new Dataset.new()
|> Dataset.add(graph) |> Dataset.add(graph)
|> Dataset.add( |> Dataset.add(
Graph.new(name: EX.NamedGraph) Graph.new(name: EX.NamedGraph)
|> Graph.add(description) |> Graph.add(description)
|> Graph.add({EX.S3, EX.p3, EX.O5}) |> Graph.add({EX.S3, EX.p3(), EX.O5})
|> Graph.add({EX.S, EX.p3, EX.O5})) |> Graph.add({EX.S, EX.p3(), EX.O5})
{:ok, )
description: description,
graph: graph, named_graph: named_graph,
dataset: dataset,
}
end
{:ok, description: description, graph: graph, named_graph: named_graph, dataset: dataset}
end
describe "RDF.Data protocol implementation of RDF.Description" do describe "RDF.Data protocol implementation of RDF.Description" do
test "merge of a single triple with different subject", %{description: description} do test "merge of a single triple with different subject", %{description: description} do
assert RDF.Data.merge(description, {EX.Other, EX.p1, EX.O3}) == assert RDF.Data.merge(description, {EX.Other, EX.p1(), EX.O3}) ==
Graph.new(description) |> Graph.add({EX.Other, EX.p1, EX.O3}) Graph.new(description) |> Graph.add({EX.Other, EX.p1(), EX.O3})
end end
test "merge of a single triple with same subject", %{description: description} do test "merge of a single triple with same subject", %{description: description} do
assert RDF.Data.merge(description, {EX.S, EX.p1, EX.O3}) == assert RDF.Data.merge(description, {EX.S, EX.p1(), EX.O3}) ==
Description.add(description, {EX.S, EX.p1, EX.O3}) Description.add(description, {EX.S, EX.p1(), EX.O3})
end end
test "merge of a single quad", %{description: description} do test "merge of a single quad", %{description: description} do
assert RDF.Data.merge(description, {EX.Other, EX.p1, EX.O3, EX.Graph}) == assert RDF.Data.merge(description, {EX.Other, EX.p1(), EX.O3, EX.Graph}) ==
Dataset.new(description) |> Dataset.add({EX.Other, EX.p1, EX.O3, EX.Graph}) Dataset.new(description) |> Dataset.add({EX.Other, EX.p1(), EX.O3, EX.Graph})
assert RDF.Data.merge(description, {EX.S, EX.p1, EX.O3, EX.Graph}) ==
Dataset.new(description) |> Dataset.add({EX.S, EX.p1, EX.O3, EX.Graph}) assert RDF.Data.merge(description, {EX.S, EX.p1(), EX.O3, EX.Graph}) ==
Dataset.new(description) |> Dataset.add({EX.S, EX.p1(), EX.O3, EX.Graph})
end end
test "merge of a description with different subject", %{description: description} do test "merge of a description with different subject", %{description: description} do
assert RDF.Data.merge(description, Description.new({EX.Other, EX.p1, EX.O3})) == assert RDF.Data.merge(description, Description.new({EX.Other, EX.p1(), EX.O3})) ==
Graph.new(description) |> Graph.add({EX.Other, EX.p1, EX.O3}) Graph.new(description) |> Graph.add({EX.Other, EX.p1(), EX.O3})
end end
test "merge of a description with same subject", %{description: description} do test "merge of a description with same subject", %{description: description} do
assert RDF.Data.merge(description, Description.new({EX.S, EX.p1, EX.O3})) == assert RDF.Data.merge(description, Description.new({EX.S, EX.p1(), EX.O3})) ==
Description.add(description, {EX.S, EX.p1, EX.O3}) Description.add(description, {EX.S, EX.p1(), EX.O3})
end end
test "merge of a graph", %{graph: graph} do test "merge of a graph", %{graph: graph} do
assert RDF.Data.merge(Description.new({EX.Other, EX.p1, EX.O3}), graph) == assert RDF.Data.merge(Description.new({EX.Other, EX.p1(), EX.O3}), graph) ==
Graph.add(graph, {EX.Other, EX.p1, EX.O3}) Graph.add(graph, {EX.Other, EX.p1(), EX.O3})
end end
test "merge of a dataset", %{dataset: dataset} do test "merge of a dataset", %{dataset: dataset} do
assert RDF.Data.merge(Description.new({EX.Other, EX.p1, EX.O3}), dataset) == assert RDF.Data.merge(Description.new({EX.Other, EX.p1(), EX.O3}), dataset) ==
Dataset.add(dataset, {EX.Other, EX.p1, EX.O3}) Dataset.add(dataset, {EX.Other, EX.p1(), EX.O3})
end end
test "delete", %{description: description} do test "delete", %{description: description} do
assert RDF.Data.delete(description, {EX.S, EX.p1, EX.O2}) == assert RDF.Data.delete(description, {EX.S, EX.p1(), EX.O2}) ==
Description.delete(description, {EX.S, EX.p1, EX.O2}) Description.delete(description, {EX.S, EX.p1(), EX.O2})
assert RDF.Data.delete(description, {EX.Other, EX.p1, EX.O2}) == description
assert RDF.Data.delete(description, {EX.Other, EX.p1(), EX.O2}) == description
end end
test "deleting a Description with a different subject does nothing", %{description: description} do test "deleting a Description with a different subject does nothing", %{
assert RDF.Data.delete(description, description: description
%Description{description | subject: EX.Other}) == description } do
assert RDF.Data.delete(
description,
%Description{description | subject: EX.Other}
) == description
end end
test "pop", %{description: description} do test "pop", %{description: description} do
assert RDF.Data.pop(description) == Description.pop(description) assert RDF.Data.pop(description) == Description.pop(description)
end end
test "include?", %{description: description} do test "include?", %{description: description} do
assert RDF.Data.include?(description, {EX.S, EX.p1, EX.O2}) assert RDF.Data.include?(description, {EX.S, EX.p1(), EX.O2})
refute RDF.Data.include?(description, {EX.Other, EX.p1, EX.O2}) refute RDF.Data.include?(description, {EX.Other, EX.p1(), EX.O2})
end end
test "describes?", %{description: description} do test "describes?", %{description: description} do
@ -97,14 +101,14 @@ defmodule RDF.DataTest do
end end
test "description when the requested subject matches the Description.subject", test "description when the requested subject matches the Description.subject",
%{description: description} do %{description: description} do
assert RDF.Data.description(description, description.subject) == description assert RDF.Data.description(description, description.subject) == description
assert RDF.Data.description(description, to_string(description.subject)) == description assert RDF.Data.description(description, to_string(description.subject)) == description
assert RDF.Data.description(description, EX.S) == description assert RDF.Data.description(description, EX.S) == description
end end
test "description when the requested subject does not match the Description.subject", test "description when the requested subject does not match the Description.subject",
%{description: description} do %{description: description} do
assert RDF.Data.description(description, iri(EX.Other)) == Description.new(EX.Other) assert RDF.Data.description(description, iri(EX.Other)) == Description.new(EX.Other)
end end
@ -121,17 +125,26 @@ defmodule RDF.DataTest do
end end
test "predicates", %{description: description} do test "predicates", %{description: description} do
assert RDF.Data.predicates(description) == MapSet.new([EX.p1, EX.p2, EX.p3]) assert RDF.Data.predicates(description) == MapSet.new([EX.p1(), EX.p2(), EX.p3()])
end end
test "objects", %{description: description} do test "objects", %{description: description} do
assert RDF.Data.objects(description) == assert RDF.Data.objects(description) ==
MapSet.new([iri(EX.O1), iri(EX.O2), iri(EX.O3), ~B<foo>]) MapSet.new([iri(EX.O1), iri(EX.O2), iri(EX.O3), ~B<foo>])
end end
test "resources", %{description: description} do test "resources", %{description: description} do
assert RDF.Data.resources(description) == assert RDF.Data.resources(description) ==
MapSet.new([iri(EX.S), EX.p1, EX.p2, EX.p3, iri(EX.O1), iri(EX.O2), iri(EX.O3), ~B<foo>]) MapSet.new([
iri(EX.S),
EX.p1(),
EX.p2(),
EX.p3(),
iri(EX.O1),
iri(EX.O2),
iri(EX.O3),
~B<foo>
])
end end
test "subject_count", %{description: description} do test "subject_count", %{description: description} do
@ -145,12 +158,12 @@ defmodule RDF.DataTest do
test "values", %{description: description} do test "values", %{description: description} do
assert RDF.Data.values(description) == assert RDF.Data.values(description) ==
%{ %{
RDF.Term.value(EX.p1) => [ RDF.Term.value(EX.p1()) => [
RDF.Term.value(RDF.iri(EX.O1)), RDF.Term.value(RDF.iri(EX.O1)),
RDF.Term.value(RDF.iri(EX.O2)) RDF.Term.value(RDF.iri(EX.O2))
], ],
RDF.Term.value(EX.p2) => [RDF.Term.value(RDF.iri(EX.O3))], RDF.Term.value(EX.p2()) => [RDF.Term.value(RDF.iri(EX.O3))],
RDF.Term.value(EX.p3) => ["_:foo", "bar"], RDF.Term.value(EX.p3()) => ["_:foo", "bar"]
} }
end end
@ -166,71 +179,79 @@ defmodule RDF.DataTest do
end end
end end
describe "RDF.Data protocol implementation of RDF.Graph" do describe "RDF.Data protocol implementation of RDF.Graph" do
test "merge of a single triple", %{graph: graph, named_graph: named_graph} do test "merge of a single triple", %{graph: graph, named_graph: named_graph} do
assert RDF.Data.merge(graph, {EX.Other, EX.p, EX.O}) == assert RDF.Data.merge(graph, {EX.Other, EX.p(), EX.O}) ==
Graph.add(graph, {EX.Other, EX.p, EX.O}) Graph.add(graph, {EX.Other, EX.p(), EX.O})
assert RDF.Data.merge(named_graph, {EX.Other, EX.p, EX.O}) ==
Graph.add(named_graph, {EX.Other, EX.p, EX.O}) assert RDF.Data.merge(named_graph, {EX.Other, EX.p(), EX.O}) ==
Graph.add(named_graph, {EX.Other, EX.p(), EX.O})
end end
test "merge of a single quad with the same graph context", test "merge of a single quad with the same graph context",
%{graph: graph, named_graph: named_graph} do %{graph: graph, named_graph: named_graph} do
assert RDF.Data.merge(graph, {EX.Other, EX.p, EX.O, nil}) == assert RDF.Data.merge(graph, {EX.Other, EX.p(), EX.O, nil}) ==
Graph.add(graph, {EX.Other, EX.p, EX.O}) Graph.add(graph, {EX.Other, EX.p(), EX.O})
assert RDF.Data.merge(named_graph, {EX.Other, EX.p, EX.O, EX.NamedGraph}) ==
Graph.add(named_graph, {EX.Other, EX.p, EX.O}) assert RDF.Data.merge(named_graph, {EX.Other, EX.p(), EX.O, EX.NamedGraph}) ==
Graph.add(named_graph, {EX.Other, EX.p(), EX.O})
end end
test "merge of a single quad with a different graph context", test "merge of a single quad with a different graph context",
%{graph: graph, named_graph: named_graph} do %{graph: graph, named_graph: named_graph} do
assert RDF.Data.merge(graph, {EX.S, EX.p1, EX.O3, EX.NamedGraph}) == assert RDF.Data.merge(graph, {EX.S, EX.p1(), EX.O3, EX.NamedGraph}) ==
Dataset.new(graph) |> Dataset.add({EX.S, EX.p1, EX.O3, EX.NamedGraph}) Dataset.new(graph) |> Dataset.add({EX.S, EX.p1(), EX.O3, EX.NamedGraph})
assert RDF.Data.merge(named_graph, {EX.S, EX.p1, EX.O3, nil}) ==
Dataset.new(named_graph) |> Dataset.add({EX.S, EX.p1, EX.O3, nil}) assert RDF.Data.merge(named_graph, {EX.S, EX.p1(), EX.O3, nil}) ==
Dataset.new(named_graph) |> Dataset.add({EX.S, EX.p1(), EX.O3, nil})
end end
test "merge of a description", %{graph: graph} do test "merge of a description", %{graph: graph} do
assert RDF.Data.merge(graph, Description.new({EX.Other, EX.p1, EX.O3})) == assert RDF.Data.merge(graph, Description.new({EX.Other, EX.p1(), EX.O3})) ==
Graph.add(graph, {EX.Other, EX.p1, EX.O3}) Graph.add(graph, {EX.Other, EX.p1(), EX.O3})
assert RDF.Data.merge(graph, Description.new({EX.S, EX.p1, EX.O3})) ==
Graph.add(graph, {EX.S, EX.p1, EX.O3}) assert RDF.Data.merge(graph, Description.new({EX.S, EX.p1(), EX.O3})) ==
Graph.add(graph, {EX.S, EX.p1(), EX.O3})
end end
test "merge of a graph with the same name", test "merge of a graph with the same name",
%{graph: graph, named_graph: named_graph} do %{graph: graph, named_graph: named_graph} do
assert RDF.Data.merge(graph, Graph.add(graph, {EX.Other, EX.p1, EX.O3})) == assert RDF.Data.merge(graph, Graph.add(graph, {EX.Other, EX.p1(), EX.O3})) ==
Graph.add(graph, {EX.Other, EX.p1, EX.O3}) Graph.add(graph, {EX.Other, EX.p1(), EX.O3})
assert RDF.Data.merge(named_graph, Graph.add(named_graph, {EX.Other, EX.p1, EX.O3})) ==
Graph.add(named_graph, {EX.Other, EX.p1, EX.O3}) assert RDF.Data.merge(named_graph, Graph.add(named_graph, {EX.Other, EX.p1(), EX.O3})) ==
Graph.add(named_graph, {EX.Other, EX.p1(), EX.O3})
end end
test "merge of a graph with a different name", test "merge of a graph with a different name",
%{graph: graph, named_graph: named_graph} do %{graph: graph, named_graph: named_graph} do
assert RDF.Data.merge(graph, named_graph) == assert RDF.Data.merge(graph, named_graph) ==
Dataset.new(graph) |> Dataset.add(named_graph) Dataset.new(graph) |> Dataset.add(named_graph)
assert RDF.Data.merge(named_graph, graph) == assert RDF.Data.merge(named_graph, graph) ==
Dataset.new(named_graph) |> Dataset.add(graph) Dataset.new(named_graph) |> Dataset.add(graph)
end end
test "merge of a dataset", %{dataset: dataset} do test "merge of a dataset", %{dataset: dataset} do
assert RDF.Data.merge(Graph.new({EX.Other, EX.p1, EX.O3}), dataset) == assert RDF.Data.merge(Graph.new({EX.Other, EX.p1(), EX.O3}), dataset) ==
Dataset.add(dataset, {EX.Other, EX.p1, EX.O3}) Dataset.add(dataset, {EX.Other, EX.p1(), EX.O3})
assert RDF.Data.merge(Graph.new({EX.Other, EX.p1, EX.O3}, name: EX.NamedGraph), dataset) ==
Dataset.add(dataset, {EX.Other, EX.p1, EX.O3, EX.NamedGraph}) assert RDF.Data.merge(Graph.new({EX.Other, EX.p1(), EX.O3}, name: EX.NamedGraph), dataset) ==
Dataset.add(dataset, {EX.Other, EX.p1(), EX.O3, EX.NamedGraph})
end end
test "delete", %{graph: graph} do test "delete", %{graph: graph} do
assert RDF.Data.delete(graph, {EX.S, EX.p1, EX.O2}) == assert RDF.Data.delete(graph, {EX.S, EX.p1(), EX.O2}) ==
Graph.delete(graph, {EX.S, EX.p1, EX.O2}) Graph.delete(graph, {EX.S, EX.p1(), EX.O2})
assert RDF.Data.delete(graph, {EX.Other, EX.p1, EX.O2}) == graph
assert RDF.Data.delete(graph, {EX.Other, EX.p1(), EX.O2}) == graph
end end
test "deleting a Graph with a different name does nothing", %{graph: graph} do test "deleting a Graph with a different name does nothing", %{graph: graph} do
assert RDF.Data.delete(graph, assert RDF.Data.delete(
%Graph{graph | name: EX.OtherGraph}) == graph graph,
%Graph{graph | name: EX.OtherGraph}
) == graph
end end
test "pop", %{graph: graph} do test "pop", %{graph: graph} do
@ -238,9 +259,9 @@ defmodule RDF.DataTest do
end end
test "include?", %{graph: graph} do test "include?", %{graph: graph} do
assert RDF.Data.include?(graph, {EX.S, EX.p1, EX.O2}) assert RDF.Data.include?(graph, {EX.S, EX.p1(), EX.O2})
assert RDF.Data.include?(graph, {EX.S2, EX.p2, EX.O3}) assert RDF.Data.include?(graph, {EX.S2, EX.p2(), EX.O3})
refute RDF.Data.include?(graph, {EX.Other, EX.p1, EX.O2}) refute RDF.Data.include?(graph, {EX.Other, EX.p1(), EX.O2})
end end
test "describes?", %{graph: graph} do test "describes?", %{graph: graph} do
@ -250,7 +271,7 @@ defmodule RDF.DataTest do
end end
test "description when a description is present", test "description when a description is present",
%{graph: graph, description: description} do %{graph: graph, description: description} do
assert RDF.Data.description(graph, iri(EX.S)) == description assert RDF.Data.description(graph, iri(EX.S)) == description
assert RDF.Data.description(graph, EX.S) == description assert RDF.Data.description(graph, EX.S) == description
end end
@ -261,7 +282,7 @@ defmodule RDF.DataTest do
test "descriptions", %{graph: graph, description: description} do test "descriptions", %{graph: graph, description: description} do
assert RDF.Data.descriptions(graph) == assert RDF.Data.descriptions(graph) ==
[description, EX.S2 |> EX.p2(EX.O3, EX.O4)] [description, EX.S2 |> EX.p2(EX.O3, EX.O4)]
end end
test "statements", %{graph: graph} do test "statements", %{graph: graph} do
@ -273,19 +294,28 @@ defmodule RDF.DataTest do
end end
test "predicates", %{graph: graph} do test "predicates", %{graph: graph} do
assert RDF.Data.predicates(graph) == MapSet.new([EX.p1, EX.p2, EX.p3]) assert RDF.Data.predicates(graph) == MapSet.new([EX.p1(), EX.p2(), EX.p3()])
end end
test "objects", %{graph: graph} do test "objects", %{graph: graph} do
assert RDF.Data.objects(graph) == assert RDF.Data.objects(graph) ==
MapSet.new([iri(EX.O1), iri(EX.O2), iri(EX.O3), iri(EX.O4), ~B<foo>]) MapSet.new([iri(EX.O1), iri(EX.O2), iri(EX.O3), iri(EX.O4), ~B<foo>])
end end
test "resources", %{graph: graph} do test "resources", %{graph: graph} do
assert RDF.Data.resources(graph) == MapSet.new([ assert RDF.Data.resources(graph) ==
iri(EX.S), iri(EX.S2), EX.p1, EX.p2, EX.p3, MapSet.new([
iri(EX.O1), iri(EX.O2), iri(EX.O3), iri(EX.O4), ~B<foo> iri(EX.S),
]) iri(EX.S2),
EX.p1(),
EX.p2(),
EX.p3(),
iri(EX.O1),
iri(EX.O2),
iri(EX.O3),
iri(EX.O4),
~B<foo>
])
end end
test "subject_count", %{graph: graph} do test "subject_count", %{graph: graph} do
@ -300,19 +330,19 @@ defmodule RDF.DataTest do
assert RDF.Data.values(graph) == assert RDF.Data.values(graph) ==
%{ %{
RDF.Term.value(RDF.iri(EX.S)) => %{ RDF.Term.value(RDF.iri(EX.S)) => %{
RDF.Term.value(EX.p1) => [ RDF.Term.value(EX.p1()) => [
RDF.Term.value(RDF.iri(EX.O1)), RDF.Term.value(RDF.iri(EX.O1)),
RDF.Term.value(RDF.iri(EX.O2)) RDF.Term.value(RDF.iri(EX.O2))
], ],
RDF.Term.value(EX.p2) => [RDF.Term.value(RDF.iri(EX.O3))], RDF.Term.value(EX.p2()) => [RDF.Term.value(RDF.iri(EX.O3))],
RDF.Term.value(EX.p3) => ["_:foo", "bar"], RDF.Term.value(EX.p3()) => ["_:foo", "bar"]
}, },
RDF.Term.value(RDF.iri(EX.S2)) => %{ RDF.Term.value(RDF.iri(EX.S2)) => %{
RDF.Term.value(EX.p2) => [ RDF.Term.value(EX.p2()) => [
RDF.Term.value(RDF.iri(EX.O3)), RDF.Term.value(RDF.iri(EX.O3)),
RDF.Term.value(RDF.iri(EX.O4)) RDF.Term.value(RDF.iri(EX.O4))
], ]
}, }
} }
end end
@ -330,51 +360,59 @@ defmodule RDF.DataTest do
end end
end end
describe "RDF.Data protocol implementation of RDF.Dataset" do describe "RDF.Data protocol implementation of RDF.Dataset" do
test "merge of a single triple", %{dataset: dataset} do test "merge of a single triple", %{dataset: dataset} do
assert RDF.Data.merge(dataset, {EX.Other, EX.p, EX.O}) == assert RDF.Data.merge(dataset, {EX.Other, EX.p(), EX.O}) ==
Dataset.add(dataset, {EX.Other, EX.p, EX.O}) Dataset.add(dataset, {EX.Other, EX.p(), EX.O})
end end
test "merge of a single quad", %{dataset: dataset} do test "merge of a single quad", %{dataset: dataset} do
assert RDF.Data.merge(dataset, {EX.Other, EX.p, EX.O, nil}) == assert RDF.Data.merge(dataset, {EX.Other, EX.p(), EX.O, nil}) ==
Dataset.add(dataset, {EX.Other, EX.p, EX.O}) Dataset.add(dataset, {EX.Other, EX.p(), EX.O})
assert RDF.Data.merge(dataset, {EX.Other, EX.p, EX.O, EX.NamedGraph}) ==
Dataset.add(dataset, {EX.Other, EX.p, EX.O, EX.NamedGraph}) assert RDF.Data.merge(dataset, {EX.Other, EX.p(), EX.O, EX.NamedGraph}) ==
Dataset.add(dataset, {EX.Other, EX.p(), EX.O, EX.NamedGraph})
end end
test "merge of a description", %{dataset: dataset} do test "merge of a description", %{dataset: dataset} do
assert RDF.Data.merge(dataset, Description.new({EX.Other, EX.p1, EX.O3})) == assert RDF.Data.merge(dataset, Description.new({EX.Other, EX.p1(), EX.O3})) ==
Dataset.add(dataset, {EX.Other, EX.p1, EX.O3}) Dataset.add(dataset, {EX.Other, EX.p1(), EX.O3})
end end
test "merge of a graph", %{dataset: dataset} do test "merge of a graph", %{dataset: dataset} do
assert RDF.Data.merge(dataset, Graph.new({EX.Other, EX.p1, EX.O3})) == assert RDF.Data.merge(dataset, Graph.new({EX.Other, EX.p1(), EX.O3})) ==
Dataset.add(dataset, {EX.Other, EX.p1, EX.O3}) Dataset.add(dataset, {EX.Other, EX.p1(), EX.O3})
assert RDF.Data.merge(dataset, Graph.new({EX.Other, EX.p1, EX.O3}, name: EX.NamedGraph)) ==
Dataset.add(dataset, {EX.Other, EX.p1, EX.O3, EX.NamedGraph}) assert RDF.Data.merge(dataset, Graph.new({EX.Other, EX.p1(), EX.O3}, name: EX.NamedGraph)) ==
Dataset.add(dataset, {EX.Other, EX.p1(), EX.O3, EX.NamedGraph})
end end
test "merge of a dataset", %{dataset: dataset} do test "merge of a dataset", %{dataset: dataset} do
assert RDF.Data.merge(dataset, Dataset.new({EX.Other, EX.p1, EX.O3})) == assert RDF.Data.merge(dataset, Dataset.new({EX.Other, EX.p1(), EX.O3})) ==
Dataset.add(dataset, {EX.Other, EX.p1, EX.O3}) Dataset.add(dataset, {EX.Other, EX.p1(), EX.O3})
assert RDF.Data.merge(dataset, Dataset.new({EX.Other, EX.p1, EX.O3}, name: EX.NamedDataset)) ==
Dataset.add(dataset, {EX.Other, EX.p1, EX.O3}) assert RDF.Data.merge(
dataset,
Dataset.new({EX.Other, EX.p1(), EX.O3}, name: EX.NamedDataset)
) ==
Dataset.add(dataset, {EX.Other, EX.p1(), EX.O3})
end end
test "delete", %{dataset: dataset} do test "delete", %{dataset: dataset} do
assert RDF.Data.delete(dataset, {EX.S, EX.p1, EX.O2}) == assert RDF.Data.delete(dataset, {EX.S, EX.p1(), EX.O2}) ==
Dataset.delete(dataset, {EX.S, EX.p1, EX.O2}) Dataset.delete(dataset, {EX.S, EX.p1(), EX.O2})
assert RDF.Data.delete(dataset, {EX.S3, EX.p3, EX.O5, EX.NamedGraph}) ==
Dataset.delete(dataset, {EX.S3, EX.p3, EX.O5, EX.NamedGraph}) assert RDF.Data.delete(dataset, {EX.S3, EX.p3(), EX.O5, EX.NamedGraph}) ==
assert RDF.Data.delete(dataset, {EX.Other, EX.p1, EX.O2}) == dataset Dataset.delete(dataset, {EX.S3, EX.p3(), EX.O5, EX.NamedGraph})
assert RDF.Data.delete(dataset, {EX.Other, EX.p1(), EX.O2}) == dataset
end end
test "deleting a Dataset with a different name does nothing", %{dataset: dataset} do test "deleting a Dataset with a different name does nothing", %{dataset: dataset} do
assert RDF.Data.delete(dataset, assert RDF.Data.delete(
%Dataset{dataset | name: EX.OtherDataset}) == dataset dataset,
%Dataset{dataset | name: EX.OtherDataset}
) == dataset
end end
test "pop", %{dataset: dataset} do test "pop", %{dataset: dataset} do
@ -382,10 +420,10 @@ defmodule RDF.DataTest do
end end
test "include?", %{dataset: dataset} do test "include?", %{dataset: dataset} do
assert RDF.Data.include?(dataset, {EX.S, EX.p1, EX.O2}) assert RDF.Data.include?(dataset, {EX.S, EX.p1(), EX.O2})
assert RDF.Data.include?(dataset, {EX.S2, EX.p2, EX.O3}) assert RDF.Data.include?(dataset, {EX.S2, EX.p2(), EX.O3})
assert RDF.Data.include?(dataset, {EX.S3, EX.p3, EX.O5, EX.NamedGraph}) assert RDF.Data.include?(dataset, {EX.S3, EX.p3(), EX.O5, EX.NamedGraph})
refute RDF.Data.include?(dataset, {EX.Other, EX.p1, EX.O2}) refute RDF.Data.include?(dataset, {EX.Other, EX.p1(), EX.O2})
end end
test "describes?", %{dataset: dataset} do test "describes?", %{dataset: dataset} do
@ -396,8 +434,8 @@ defmodule RDF.DataTest do
end end
test "description when a description is present", test "description when a description is present",
%{dataset: dataset, description: description} do %{dataset: dataset, description: description} do
description_aggregate = Description.add(description, {EX.S, EX.p3, EX.O5}) description_aggregate = Description.add(description, {EX.S, EX.p3(), EX.O5})
assert RDF.Data.description(dataset, iri(EX.S)) == description_aggregate assert RDF.Data.description(dataset, iri(EX.S)) == description_aggregate
assert RDF.Data.description(dataset, EX.S) == description_aggregate assert RDF.Data.description(dataset, EX.S) == description_aggregate
end end
@ -407,12 +445,13 @@ defmodule RDF.DataTest do
end end
test "descriptions", %{dataset: dataset, description: description} do test "descriptions", %{dataset: dataset, description: description} do
description_aggregate = Description.add(description, {EX.S, EX.p3, EX.O5}) description_aggregate = Description.add(description, {EX.S, EX.p3(), EX.O5})
assert RDF.Data.descriptions(dataset) == [ assert RDF.Data.descriptions(dataset) == [
description_aggregate, description_aggregate,
(EX.S2 |> EX.p2(EX.O3, EX.O4)), EX.S2 |> EX.p2(EX.O3, EX.O4),
(EX.S3 |> EX.p3(EX.O5)) EX.S3 |> EX.p3(EX.O5)
] ]
end end
test "statements", %{dataset: dataset} do test "statements", %{dataset: dataset} do
@ -424,19 +463,30 @@ defmodule RDF.DataTest do
end end
test "predicates", %{dataset: dataset} do test "predicates", %{dataset: dataset} do
assert RDF.Data.predicates(dataset) == MapSet.new([EX.p1, EX.p2, EX.p3]) assert RDF.Data.predicates(dataset) == MapSet.new([EX.p1(), EX.p2(), EX.p3()])
end end
test "objects", %{dataset: dataset} do test "objects", %{dataset: dataset} do
assert RDF.Data.objects(dataset) == assert RDF.Data.objects(dataset) ==
MapSet.new([iri(EX.O1), iri(EX.O2), iri(EX.O3), iri(EX.O4), iri(EX.O5), ~B<foo>]) MapSet.new([iri(EX.O1), iri(EX.O2), iri(EX.O3), iri(EX.O4), iri(EX.O5), ~B<foo>])
end end
test "resources", %{dataset: dataset} do test "resources", %{dataset: dataset} do
assert RDF.Data.resources(dataset) == MapSet.new([ assert RDF.Data.resources(dataset) ==
iri(EX.S), iri(EX.S2), iri(EX.S3), EX.p1, EX.p2, EX.p3, MapSet.new([
iri(EX.O1), iri(EX.O2), iri(EX.O3), iri(EX.O4), iri(EX.O5), ~B<foo> iri(EX.S),
]) iri(EX.S2),
iri(EX.S3),
EX.p1(),
EX.p2(),
EX.p3(),
iri(EX.O1),
iri(EX.O2),
iri(EX.O3),
iri(EX.O4),
iri(EX.O5),
~B<foo>
])
end end
test "subject_count", %{dataset: dataset} do test "subject_count", %{dataset: dataset} do
@ -452,34 +502,34 @@ defmodule RDF.DataTest do
%{ %{
nil => %{ nil => %{
RDF.Term.value(RDF.iri(EX.S)) => %{ RDF.Term.value(RDF.iri(EX.S)) => %{
RDF.Term.value(EX.p1) => [ RDF.Term.value(EX.p1()) => [
RDF.Term.value(RDF.iri(EX.O1)), RDF.Term.value(RDF.iri(EX.O1)),
RDF.Term.value(RDF.iri(EX.O2)) RDF.Term.value(RDF.iri(EX.O2))
], ],
RDF.Term.value(EX.p2) => [RDF.Term.value(RDF.iri(EX.O3))], RDF.Term.value(EX.p2()) => [RDF.Term.value(RDF.iri(EX.O3))],
RDF.Term.value(EX.p3) => ["_:foo", "bar"], RDF.Term.value(EX.p3()) => ["_:foo", "bar"]
}, },
RDF.Term.value(RDF.iri(EX.S2)) => %{ RDF.Term.value(RDF.iri(EX.S2)) => %{
RDF.Term.value(EX.p2) => [ RDF.Term.value(EX.p2()) => [
RDF.Term.value(RDF.iri(EX.O3)), RDF.Term.value(RDF.iri(EX.O3)),
RDF.Term.value(RDF.iri(EX.O4)) RDF.Term.value(RDF.iri(EX.O4))
], ]
}, }
}, },
RDF.Term.value(RDF.iri(EX.NamedGraph)) => %{ RDF.Term.value(RDF.iri(EX.NamedGraph)) => %{
RDF.Term.value(RDF.iri(EX.S)) => %{ RDF.Term.value(RDF.iri(EX.S)) => %{
RDF.Term.value(EX.p1) => [ RDF.Term.value(EX.p1()) => [
RDF.Term.value(RDF.iri(EX.O1)), RDF.Term.value(RDF.iri(EX.O1)),
RDF.Term.value(RDF.iri(EX.O2)) RDF.Term.value(RDF.iri(EX.O2))
], ],
RDF.Term.value(EX.p2) => [RDF.Term.value(RDF.iri(EX.O3))], RDF.Term.value(EX.p2()) => [RDF.Term.value(RDF.iri(EX.O3))],
RDF.Term.value(EX.p3) => ["_:foo", "bar", RDF.Term.value(RDF.iri(EX.O5))], RDF.Term.value(EX.p3()) => ["_:foo", "bar", RDF.Term.value(RDF.iri(EX.O5))]
}, },
RDF.Term.value(RDF.iri(EX.S3)) => %{ RDF.Term.value(RDF.iri(EX.S3)) => %{
RDF.Term.value(EX.p3) => [ RDF.Term.value(EX.p3()) => [
RDF.Term.value(RDF.iri(EX.O5)) RDF.Term.value(RDF.iri(EX.O5))
], ]
}, }
} }
} }
end end
@ -488,8 +538,10 @@ defmodule RDF.DataTest do
mapping = fn mapping = fn
{:graph_name, graph_name} -> {:graph_name, graph_name} ->
graph_name graph_name
{:predicate, predicate} -> {:predicate, predicate} ->
predicate |> to_string() |> String.split("/") |> List.last() |> String.to_atom() predicate |> to_string() |> String.split("/") |> List.last() |> String.to_atom()
{_, term} -> {_, term} ->
RDF.Term.value(term) RDF.Term.value(term)
end end
@ -503,14 +555,14 @@ defmodule RDF.DataTest do
RDF.Term.value(RDF.iri(EX.O2)) RDF.Term.value(RDF.iri(EX.O2))
], ],
p2: [RDF.Term.value(RDF.iri(EX.O3))], p2: [RDF.Term.value(RDF.iri(EX.O3))],
p3: ["_:foo", "bar"], p3: ["_:foo", "bar"]
}, },
RDF.Term.value(RDF.iri(EX.S2)) => %{ RDF.Term.value(RDF.iri(EX.S2)) => %{
p2: [ p2: [
RDF.Term.value(RDF.iri(EX.O3)), RDF.Term.value(RDF.iri(EX.O3)),
RDF.Term.value(RDF.iri(EX.O4)) RDF.Term.value(RDF.iri(EX.O4))
], ]
}, }
}, },
RDF.iri(EX.NamedGraph) => %{ RDF.iri(EX.NamedGraph) => %{
RDF.Term.value(RDF.iri(EX.S)) => %{ RDF.Term.value(RDF.iri(EX.S)) => %{
@ -519,13 +571,13 @@ defmodule RDF.DataTest do
RDF.Term.value(RDF.iri(EX.O2)) RDF.Term.value(RDF.iri(EX.O2))
], ],
p2: [RDF.Term.value(RDF.iri(EX.O3))], p2: [RDF.Term.value(RDF.iri(EX.O3))],
p3: ["_:foo", "bar", RDF.Term.value(RDF.iri(EX.O5))], p3: ["_:foo", "bar", RDF.Term.value(RDF.iri(EX.O5))]
}, },
RDF.Term.value(RDF.iri(EX.S3)) => %{ RDF.Term.value(RDF.iri(EX.S3)) => %{
p3: [ p3: [
RDF.Term.value(RDF.iri(EX.O5)) RDF.Term.value(RDF.iri(EX.O5))
], ]
}, }
} }
} }
end end
@ -536,20 +588,26 @@ defmodule RDF.DataTest do
assert RDF.Data.equal?(Dataset.new(description), description) assert RDF.Data.equal?(Dataset.new(description), description)
assert RDF.Data.equal?(Dataset.new(graph), graph) assert RDF.Data.equal?(Dataset.new(graph), graph)
assert RDF.Data.equal?(Dataset.new(graph), RDF.Graph.add_prefixes(graph, %{ex: EX})) assert RDF.Data.equal?(Dataset.new(graph), RDF.Graph.add_prefixes(graph, %{ex: EX}))
assert RDF.Data.equal?((Dataset.new(graph)
|> Dataset.add(Graph.new(description, name: EX.Graph1, prefixes: %{ex: EX}))), assert RDF.Data.equal?(
(Dataset.new(graph) Dataset.new(graph)
|> Dataset.add(Graph.new(description, name: EX.Graph1, prefixes: %{ex: RDF})))) |> Dataset.add(Graph.new(description, name: EX.Graph1, prefixes: %{ex: EX})),
Dataset.new(graph)
|> Dataset.add(Graph.new(description, name: EX.Graph1, prefixes: %{ex: RDF}))
)
refute RDF.Data.equal?(dataset, dataset |> Dataset.delete_graph(EX.NamedGraph)) refute RDF.Data.equal?(dataset, dataset |> Dataset.delete_graph(EX.NamedGraph))
refute RDF.Data.equal?(dataset |> Dataset.delete_graph(EX.NamedGraph), dataset) refute RDF.Data.equal?(dataset |> Dataset.delete_graph(EX.NamedGraph), dataset)
refute RDF.Data.equal?((Dataset.new(graph)
|> Dataset.add(Graph.new(description, name: EX.Graph1))), refute RDF.Data.equal?(
(Dataset.new(graph) Dataset.new(graph)
|> Dataset.add(Graph.new(description, name: EX.Graph2)))) |> Dataset.add(Graph.new(description, name: EX.Graph1)),
Dataset.new(graph)
|> Dataset.add(Graph.new(description, name: EX.Graph2))
)
refute RDF.Data.equal?(dataset, description) refute RDF.Data.equal?(dataset, description)
refute RDF.Data.equal?(dataset, graph) refute RDF.Data.equal?(dataset, graph)
end end
end end
end end

File diff suppressed because it is too large


@ -6,192 +6,211 @@ defmodule RDF.Literal.GenericTest do
@valid %{ @valid %{
# input => { value , datatype } # input => { value , datatype }
"foo" => { "foo" , "http://example.com/datatype" }, "foo" => {"foo", "http://example.com/datatype"}
} }
describe "new" do describe "new" do
test "with value and datatype" do test "with value and datatype" do
Enum.each @valid, fn {input, {value, datatype}} -> Enum.each(@valid, fn {input, {value, datatype}} ->
assert %Literal{literal: %Generic{value: ^value, datatype: ^datatype}} = assert %Literal{literal: %Generic{value: ^value, datatype: ^datatype}} =
Generic.new(input, datatype: datatype) Generic.new(input, datatype: datatype)
assert %Literal{literal: %Generic{value: ^value, datatype: ^datatype}} = assert %Literal{literal: %Generic{value: ^value, datatype: ^datatype}} =
Generic.new(input, datatype: RDF.iri(datatype)) Generic.new(input, datatype: RDF.iri(datatype))
end end)
end end
test "with datatype directly" do test "with datatype directly" do
Enum.each @valid, fn {input, {_, datatype}} -> Enum.each(@valid, fn {input, {_, datatype}} ->
datatype_iri = RDF.iri(datatype) datatype_iri = RDF.iri(datatype)
assert Generic.new(input, datatype) == Generic.new(input, datatype: datatype) assert Generic.new(input, datatype) == Generic.new(input, datatype: datatype)
assert Generic.new(input, datatype_iri) == Generic.new(input, datatype: datatype_iri) assert Generic.new(input, datatype_iri) == Generic.new(input, datatype: datatype_iri)
end end)
end end
test "with datatype as a vocabulary term" do test "with datatype as a vocabulary term" do
datatype = EX.Datatype |> RDF.iri() |> to_string() datatype = EX.Datatype |> RDF.iri() |> to_string()
assert %Literal{literal: %Generic{value: "foo", datatype: ^datatype}} = assert %Literal{literal: %Generic{value: "foo", datatype: ^datatype}} =
Generic.new("foo", datatype: EX.Datatype) Generic.new("foo", datatype: EX.Datatype)
assert Generic.new("foo", EX.Datatype) == Generic.new("foo", datatype: EX.Datatype) assert Generic.new("foo", EX.Datatype) == Generic.new("foo", datatype: EX.Datatype)
end end
test "with canonicalize opts" do test "with canonicalize opts" do
Enum.each @valid, fn {input, {value, datatype}} -> Enum.each(@valid, fn {input, {value, datatype}} ->
assert %Literal{literal: %Generic{value: ^value, datatype: ^datatype}} = assert %Literal{literal: %Generic{value: ^value, datatype: ^datatype}} =
Generic.new(input, datatype: datatype, canonicalize: true) Generic.new(input, datatype: datatype, canonicalize: true)
end end)
end end
test "without a datatype it produces an invalid literal" do test "without a datatype it produces an invalid literal" do
Enum.each @valid, fn {input, {value, _}} -> Enum.each(@valid, fn {input, {value, _}} ->
assert %Literal{literal: %Generic{value: ^value, datatype: nil}} = assert %Literal{literal: %Generic{value: ^value, datatype: nil}} =
literal = Generic.new(input, []) literal = Generic.new(input, [])
assert Generic.valid?(literal) == false assert Generic.valid?(literal) == false
end end)
end end
test "with nil as a datatype it produces an invalid literal" do test "with nil as a datatype it produces an invalid literal" do
Enum.each @valid, fn {input, {value, _}} -> Enum.each(@valid, fn {input, {value, _}} ->
assert %Literal{literal: %Generic{value: ^value, datatype: nil}} = assert %Literal{literal: %Generic{value: ^value, datatype: nil}} =
literal = Generic.new(input, datatype: nil) literal = Generic.new(input, datatype: nil)
assert Generic.valid?(literal) == false assert Generic.valid?(literal) == false
end end)
end end
test "with the empty string as a datatype it produces an invalid literal" do test "with the empty string as a datatype it produces an invalid literal" do
Enum.each @valid, fn {input, {value, _}} -> Enum.each(@valid, fn {input, {value, _}} ->
assert %Literal{literal: %Generic{value: ^value, datatype: nil}} = assert %Literal{literal: %Generic{value: ^value, datatype: nil}} =
literal = Generic.new(input, datatype: "") literal = Generic.new(input, datatype: "")
assert Generic.valid?(literal) == false assert Generic.valid?(literal) == false
end end)
end end
end end
describe "new!" do describe "new!" do
test "with valid values, it behaves the same as new" do test "with valid values, it behaves the same as new" do
Enum.each @valid, fn {input, {_, datatype}} -> Enum.each(@valid, fn {input, {_, datatype}} ->
assert Generic.new!(input, datatype: datatype) == assert Generic.new!(input, datatype: datatype) ==
Generic.new(input, datatype: datatype) Generic.new(input, datatype: datatype)
assert Generic.new!(input, datatype: datatype, canonicalize: true) == assert Generic.new!(input, datatype: datatype, canonicalize: true) ==
Generic.new(input, datatype: datatype, canonicalize: true) Generic.new(input, datatype: datatype, canonicalize: true)
end end)
end end
test "without a datatype it raises an error" do test "without a datatype it raises an error" do
Enum.each @valid, fn {input, _} -> Enum.each(@valid, fn {input, _} ->
assert_raise ArgumentError, fn -> Generic.new!(input, []) end assert_raise ArgumentError, fn -> Generic.new!(input, []) end
end end)
end end
test "with nil as a datatype it raises an error" do test "with nil as a datatype it raises an error" do
Enum.each @valid, fn {input, _} -> Enum.each(@valid, fn {input, _} ->
assert_raise ArgumentError, fn -> Generic.new!(input, datatype: nil) end assert_raise ArgumentError, fn -> Generic.new!(input, datatype: nil) end
end end)
end end
test "with the empty string as a datatype it raises an error" do test "with the empty string as a datatype it raises an error" do
Enum.each @valid, fn {input, _} -> Enum.each(@valid, fn {input, _} ->
assert_raise ArgumentError, fn -> Generic.new!(input, datatype: "") end assert_raise ArgumentError, fn -> Generic.new!(input, datatype: "") end
end end)
end end
end end
test "datatype?/1" do test "datatype?/1" do
assert Generic.datatype?(Generic) == true assert Generic.datatype?(Generic) == true
Enum.each @valid, fn {input, {_, datatype}} ->
Enum.each(@valid, fn {input, {_, datatype}} ->
literal = Generic.new(input, datatype: datatype) literal = Generic.new(input, datatype: datatype)
assert Generic.datatype?(literal) == true assert Generic.datatype?(literal) == true
assert Generic.datatype?(literal.literal) == true assert Generic.datatype?(literal.literal) == true
end end)
end end
test "datatype_id/1" do test "datatype_id/1" do
Enum.each @valid, fn {input, {_, datatype}} -> Enum.each(@valid, fn {input, {_, datatype}} ->
assert (Generic.new(input, datatype: datatype) |> Generic.datatype_id()) == RDF.iri(datatype) assert Generic.new(input, datatype: datatype) |> Generic.datatype_id() == RDF.iri(datatype)
end end)
end end
test "language/1" do test "language/1" do
Enum.each @valid, fn {input, {_, datatype}} -> Enum.each(@valid, fn {input, {_, datatype}} ->
assert (Generic.new(input, datatype: datatype) |> Generic.language()) == nil assert Generic.new(input, datatype: datatype) |> Generic.language() == nil
end end)
end end
test "value/1" do test "value/1" do
Enum.each @valid, fn {input, {value, datatype}} -> Enum.each(@valid, fn {input, {value, datatype}} ->
assert (Generic.new(input, datatype: datatype) |> Generic.value()) == value assert Generic.new(input, datatype: datatype) |> Generic.value() == value
end end)
end end
test "lexical/1" do test "lexical/1" do
Enum.each @valid, fn {input, {value, datatype}} -> Enum.each(@valid, fn {input, {value, datatype}} ->
assert (Generic.new(input, datatype: datatype) |> Generic.lexical()) == value assert Generic.new(input, datatype: datatype) |> Generic.lexical() == value
end end)
end end
test "canonical/1" do test "canonical/1" do
Enum.each @valid, fn {input, {_, datatype}} -> Enum.each(@valid, fn {input, {_, datatype}} ->
assert (Generic.new(input, datatype: datatype) |> Generic.canonical()) == assert Generic.new(input, datatype: datatype) |> Generic.canonical() ==
Generic.new(input, datatype: datatype) Generic.new(input, datatype: datatype)
end end)
end end
test "canonical?/1" do test "canonical?/1" do
Enum.each @valid, fn {input, {_, datatype}} -> Enum.each(@valid, fn {input, {_, datatype}} ->
assert (Generic.new(input, datatype: datatype) |> Generic.canonical?()) == true assert Generic.new(input, datatype: datatype) |> Generic.canonical?() == true
end end)
end end
describe "valid?/1" do describe "valid?/1" do
test "with a datatype" do test "with a datatype" do
Enum.each @valid, fn {input, {_, datatype}} -> Enum.each(@valid, fn {input, {_, datatype}} ->
assert (Generic.new(input, datatype: datatype) |> Generic.valid?()) == true assert Generic.new(input, datatype: datatype) |> Generic.valid?() == true
end end)
end end
test "without a datatype" do test "without a datatype" do
Enum.each @valid, fn {input, _} -> Enum.each(@valid, fn {input, _} ->
assert (Generic.new(input, datatype: nil) |> Generic.valid?()) == false assert Generic.new(input, datatype: nil) |> Generic.valid?() == false
assert (Generic.new(input, datatype: "") |> Generic.valid?()) == false assert Generic.new(input, datatype: "") |> Generic.valid?() == false
end end)
end end
end end
describe "cast/1" do describe "cast/1" do
test "always return nil (RDF.Literal.Generic does not support cast)" do test "always return nil (RDF.Literal.Generic does not support cast)" do
Enum.each @valid, fn {input, {_, datatype}} -> Enum.each(@valid, fn {input, {_, datatype}} ->
assert (Generic.new(input, datatype: datatype) |> Generic.cast()) == nil assert Generic.new(input, datatype: datatype) |> Generic.cast() == nil
end end)
end end
end end
test "equal_value?/2" do test "equal_value?/2" do
Enum.each @valid, fn {input, {_, datatype}} -> Enum.each(@valid, fn {input, {_, datatype}} ->
assert Generic.equal_value?( assert Generic.equal_value?(
Generic.new(input, datatype: datatype), Generic.new(input, datatype: datatype),
Generic.new(input, datatype: datatype)) == true Generic.new(input, datatype: datatype)
end ) == true
end)
assert Generic.equal_value?( assert Generic.equal_value?(
Generic.new("foo", datatype: "http://example.com/foo"), Generic.new("foo", datatype: "http://example.com/foo"),
Generic.new("foo", datatype: "http://example.com/bar")) == nil Generic.new("foo", datatype: "http://example.com/bar")
) == nil
assert Generic.equal_value?(Generic.new("foo", []), Generic.new("foo", [])) == true assert Generic.equal_value?(Generic.new("foo", []), Generic.new("foo", [])) == true
assert Generic.equal_value?(Generic.new("foo", []), Generic.new("bar", [])) == false assert Generic.equal_value?(Generic.new("foo", []), Generic.new("bar", [])) == false
assert Generic.equal_value?(Generic.new("foo", datatype: "foo"), RDF.XSD.String.new("foo")) == nil
assert Generic.equal_value?(Generic.new("foo", datatype: "foo"), RDF.XSD.String.new("foo")) ==
nil
end end
test "compare/2" do test "compare/2" do
Enum.each @valid, fn {input, {_, datatype}} -> Enum.each(@valid, fn {input, {_, datatype}} ->
assert Generic.compare( assert Generic.compare(
Generic.new(input, datatype: datatype), Generic.new(input, datatype: datatype),
Generic.new(input, datatype: datatype)) == :eq Generic.new(input, datatype: datatype)
end ) == :eq
end)
assert Generic.compare(Generic.new("foo", datatype: "en"), Generic.new("bar", datatype: "en")) == :gt assert Generic.compare(Generic.new("foo", datatype: "en"), Generic.new("bar", datatype: "en")) ==
assert Generic.compare(Generic.new("bar", datatype: "en"), Generic.new("baz", datatype: "en")) == :lt :gt
assert Generic.compare(Generic.new("bar", datatype: "en"), Generic.new("baz", datatype: "en")) ==
:lt
assert Generic.compare( assert Generic.compare(
Generic.new("foo", datatype: "en"), Generic.new("foo", datatype: "en"),
Generic.new("foo", datatype: "de")) == nil Generic.new("foo", datatype: "de")
) == nil
assert Generic.compare(Generic.new("foo", []), Generic.new("foo", [])) == nil assert Generic.compare(Generic.new("foo", []), Generic.new("foo", [])) == nil
assert Generic.compare(Generic.new("foo", []), RDF.XSD.String.new("foo")) == nil assert Generic.compare(Generic.new("foo", []), RDF.XSD.String.new("foo")) == nil
end end


@ -5,171 +5,179 @@ defmodule RDF.LangStringTest do
alias RDF.XSD alias RDF.XSD
@valid %{ @valid %{
# input => { value , language } # input => { value, language }
"foo" => { "foo" , "en" }, "foo" => {"foo", "en"},
0 => { "0" , "en" }, 0 => {"0", "en"},
42 => { "42" , "en" }, 42 => {"42", "en"},
3.14 => { "3.14" , "en" }, 3.14 => {"3.14", "en"},
true => { "true" , "en" }, true => {"true", "en"},
false => { "false" , "en" }, false => {"false", "en"}
} }
describe "new" do describe "new" do
test "with value and language" do test "with value and language" do
Enum.each @valid, fn {input, {value, language}} -> Enum.each(@valid, fn {input, {value, language}} ->
assert %Literal{literal: %LangString{value: ^value, language: ^language}} = assert %Literal{literal: %LangString{value: ^value, language: ^language}} =
LangString.new(input, language: language) LangString.new(input, language: language)
assert %Literal{literal: %LangString{value: ^value, language: ^language}} = assert %Literal{literal: %LangString{value: ^value, language: ^language}} =
LangString.new(input, language: String.to_atom(language)) LangString.new(input, language: String.to_atom(language))
end end)
end end
test "with language directly" do test "with language directly" do
Enum.each @valid, fn {input, {_, language}} -> Enum.each(@valid, fn {input, {_, language}} ->
assert LangString.new(input, language) == LangString.new(input, language: language) assert LangString.new(input, language) == LangString.new(input, language: language)
assert LangString.new(input, String.to_atom(language)) == assert LangString.new(input, String.to_atom(language)) ==
LangString.new(input, language: String.to_atom(language)) LangString.new(input, language: String.to_atom(language))
end end)
end end
test "language get normalized to downcase" do test "language get normalized to downcase" do
Enum.each @valid, fn {input, {value, _}} -> Enum.each(@valid, fn {input, {value, _}} ->
assert %Literal{literal: %LangString{value: ^value, language: "de"}} = assert %Literal{literal: %LangString{value: ^value, language: "de"}} =
LangString.new(input, language: "DE") LangString.new(input, language: "DE")
end end)
end end
test "with canonicalize opts" do test "with canonicalize opts" do
Enum.each @valid, fn {input, {value, language}} -> Enum.each(@valid, fn {input, {value, language}} ->
assert %Literal{literal: %LangString{value: ^value, language: ^language}} = assert %Literal{literal: %LangString{value: ^value, language: ^language}} =
LangString.new(input, language: language, canonicalize: true) LangString.new(input, language: language, canonicalize: true)
end end)
end end
test "without a language it produces an invalid literal" do test "without a language it produces an invalid literal" do
Enum.each @valid, fn {input, {value, _}} -> Enum.each(@valid, fn {input, {value, _}} ->
assert %Literal{literal: %LangString{value: ^value, language: nil}} = assert %Literal{literal: %LangString{value: ^value, language: nil}} =
literal = LangString.new(input, []) literal = LangString.new(input, [])
assert LangString.valid?(literal) == false assert LangString.valid?(literal) == false
end end)
end end
test "with nil as a language it produces an invalid literal" do test "with nil as a language it produces an invalid literal" do
Enum.each @valid, fn {input, {value, _}} -> Enum.each(@valid, fn {input, {value, _}} ->
assert %Literal{literal: %LangString{value: ^value, language: nil}} = assert %Literal{literal: %LangString{value: ^value, language: nil}} =
literal = LangString.new(input, language: nil) literal = LangString.new(input, language: nil)
assert LangString.valid?(literal) == false assert LangString.valid?(literal) == false
end end)
end end
test "with the empty string as a language it produces an invalid literal" do test "with the empty string as a language it produces an invalid literal" do
Enum.each @valid, fn {input, {value, _}} -> Enum.each(@valid, fn {input, {value, _}} ->
assert %Literal{literal: %LangString{value: ^value, language: nil}} = assert %Literal{literal: %LangString{value: ^value, language: nil}} =
literal = LangString.new(input, language: "") literal = LangString.new(input, language: "")
assert LangString.valid?(literal) == false assert LangString.valid?(literal) == false
end end)
end end
end end
describe "new!" do describe "new!" do
test "with valid values, it behaves the same as new" do test "with valid values, it behaves the same as new" do
Enum.each @valid, fn {input, {_, language}} -> Enum.each(@valid, fn {input, {_, language}} ->
assert LangString.new!(input, language: language) == assert LangString.new!(input, language: language) ==
LangString.new(input, language: language) LangString.new(input, language: language)
assert LangString.new!(input, language: language, canonicalize: true) == assert LangString.new!(input, language: language, canonicalize: true) ==
LangString.new(input, language: language, canonicalize: true) LangString.new(input, language: language, canonicalize: true)
end end)
end end
test "without a language it raises an error" do test "without a language it raises an error" do
Enum.each @valid, fn {input, _} -> Enum.each(@valid, fn {input, _} ->
assert_raise ArgumentError, fn -> LangString.new!(input, []) end assert_raise ArgumentError, fn -> LangString.new!(input, []) end
end end)
end end
test "with nil as a language it raises an error" do test "with nil as a language it raises an error" do
Enum.each @valid, fn {input, _} -> Enum.each(@valid, fn {input, _} ->
assert_raise ArgumentError, fn -> LangString.new!(input, language: nil) end assert_raise ArgumentError, fn -> LangString.new!(input, language: nil) end
end end)
end end
test "with the empty string as a language it raises an error" do test "with the empty string as a language it raises an error" do
Enum.each @valid, fn {input, _} -> Enum.each(@valid, fn {input, _} ->
assert_raise ArgumentError, fn -> LangString.new!(input, language: "") end assert_raise ArgumentError, fn -> LangString.new!(input, language: "") end
end end)
end end
end end
test "datatype?/1" do test "datatype?/1" do
assert LangString.datatype?(LangString) == true assert LangString.datatype?(LangString) == true
Enum.each @valid, fn {input, {_, language}} ->
Enum.each(@valid, fn {input, {_, language}} ->
literal = LangString.new(input, language: language) literal = LangString.new(input, language: language)
assert LangString.datatype?(literal) == true assert LangString.datatype?(literal) == true
assert LangString.datatype?(literal.literal) == true assert LangString.datatype?(literal.literal) == true
end end)
end end
test "datatype_id/1" do test "datatype_id/1" do
Enum.each @valid, fn {input, {_, language}} -> Enum.each(@valid, fn {input, {_, language}} ->
assert (LangString.new(input, language: language) |> LangString.datatype_id()) == RDF.iri(LangString.id()) assert LangString.new(input, language: language) |> LangString.datatype_id() ==
end RDF.iri(LangString.id())
end)
end end
test "language/1" do test "language/1" do
Enum.each @valid, fn {input, {_, language}} -> Enum.each(@valid, fn {input, {_, language}} ->
assert (LangString.new(input, language: language) |> LangString.language()) == language assert LangString.new(input, language: language) |> LangString.language() == language
end end)
assert (LangString.new("foo", language: nil) |> LangString.language()) == nil assert LangString.new("foo", language: nil) |> LangString.language() == nil
assert (LangString.new("foo", language: "") |> LangString.language()) == nil assert LangString.new("foo", language: "") |> LangString.language() == nil
end end
test "value/1" do test "value/1" do
Enum.each @valid, fn {input, {value, language}} -> Enum.each(@valid, fn {input, {value, language}} ->
assert (LangString.new(input, language: language) |> LangString.value()) == value assert LangString.new(input, language: language) |> LangString.value() == value
end end)
end end
test "lexical/1" do test "lexical/1" do
Enum.each @valid, fn {input, {value, language}} -> Enum.each(@valid, fn {input, {value, language}} ->
assert (LangString.new(input, language: language) |> LangString.lexical()) == value assert LangString.new(input, language: language) |> LangString.lexical() == value
end end)
end end
test "canonical/1" do test "canonical/1" do
Enum.each @valid, fn {input, {_, language}} -> Enum.each(@valid, fn {input, {_, language}} ->
assert (LangString.new(input, language: language) |> LangString.canonical()) == assert LangString.new(input, language: language) |> LangString.canonical() ==
LangString.new(input, language: language) LangString.new(input, language: language)
end end)
end end
test "canonical?/1" do test "canonical?/1" do
Enum.each @valid, fn {input, {_, language}} -> Enum.each(@valid, fn {input, {_, language}} ->
assert (LangString.new(input, language: language) |> LangString.canonical?()) == true assert LangString.new(input, language: language) |> LangString.canonical?() == true
end end)
end end
describe "valid?/1" do describe "valid?/1" do
test "with a language" do test "with a language" do
Enum.each @valid, fn {input, {_, language}} -> Enum.each(@valid, fn {input, {_, language}} ->
assert (LangString.new(input, language: language) |> LangString.valid?()) == true assert LangString.new(input, language: language) |> LangString.valid?() == true
end end)
end end
test "without a language" do test "without a language" do
Enum.each @valid, fn {input, _} -> Enum.each(@valid, fn {input, _} ->
assert (LangString.new(input, language: nil) |> LangString.valid?()) == false assert LangString.new(input, language: nil) |> LangString.valid?() == false
assert (LangString.new(input, language: "") |> LangString.valid?()) == false assert LangString.new(input, language: "") |> LangString.valid?() == false
end end)
end end
end end
describe "cast/1" do describe "cast/1" do
test "when given a valid RDF.LangString literal" do test "when given a valid RDF.LangString literal" do
Enum.each @valid, fn {input, {_, language}} -> Enum.each(@valid, fn {input, {_, language}} ->
assert LangString.new(input, language: language) |> LangString.cast() == assert LangString.new(input, language: language) |> LangString.cast() ==
LangString.new(input, language: language) LangString.new(input, language: language)
end end)
end end
test "when given an valid RDF.LangString literal" do test "when given an valid RDF.LangString literal" do
@ -192,15 +200,18 @@ defmodule RDF.LangStringTest do
end end
test "equal_value?/2" do test "equal_value?/2" do
Enum.each @valid, fn {input, {_, language}} -> Enum.each(@valid, fn {input, {_, language}} ->
assert LangString.equal_value?( assert LangString.equal_value?(
LangString.new(input, language: language), LangString.new(input, language: language),
LangString.new(input, language: language)) == true LangString.new(input, language: language)
end ) == true
end)
assert LangString.equal_value?( assert LangString.equal_value?(
LangString.new("foo", language: "en"), LangString.new("foo", language: "en"),
LangString.new("foo", language: "de")) == false LangString.new("foo", language: "de")
) == false
assert LangString.equal_value?(LangString.new("foo", []), LangString.new("foo", [])) == true assert LangString.equal_value?(LangString.new("foo", []), LangString.new("foo", [])) == true
assert LangString.equal_value?(LangString.new("foo", []), LangString.new("bar", [])) == false assert LangString.equal_value?(LangString.new("foo", []), LangString.new("bar", [])) == false
assert LangString.equal_value?(LangString.new("foo", []), RDF.XSD.String.new("foo")) == nil assert LangString.equal_value?(LangString.new("foo", []), RDF.XSD.String.new("foo")) == nil
@ -208,18 +219,28 @@ defmodule RDF.LangStringTest do
end end
test "compare/2" do test "compare/2" do
Enum.each @valid, fn {input, {_, language}} -> Enum.each(@valid, fn {input, {_, language}} ->
assert LangString.compare( assert LangString.compare(
LangString.new(input, language: language), LangString.new(input, language: language),
LangString.new(input, language: language)) == :eq LangString.new(input, language: language)
end ) == :eq
end)
assert LangString.compare(LangString.new("foo", language: "en"), LangString.new("bar", language: "en")) == :gt
assert LangString.compare(LangString.new("bar", language: "en"), LangString.new("baz", language: "en")) == :lt
assert LangString.compare( assert LangString.compare(
LangString.new("foo", language: "en"), LangString.new("foo", language: "en"),
LangString.new("foo", language: "de")) == nil LangString.new("bar", language: "en")
) == :gt
assert LangString.compare(
LangString.new("bar", language: "en"),
LangString.new("baz", language: "en")
) == :lt
assert LangString.compare(
LangString.new("foo", language: "en"),
LangString.new("foo", language: "de")
) == nil
assert LangString.compare(LangString.new("foo", []), LangString.new("foo", [])) == nil assert LangString.compare(LangString.new("foo", []), LangString.new("foo", [])) == nil
assert LangString.compare(LangString.new("foo", []), RDF.XSD.String.new("foo")) == nil assert LangString.compare(LangString.new("foo", []), RDF.XSD.String.new("foo")) == nil
end end
@ -231,35 +252,39 @@ defmodule RDF.LangStringTest do
{"de-DE", "de"}, {"de-DE", "de"},
{"de-CH", "de"}, {"de-CH", "de"},
{"de-CH", "de-ch"}, {"de-CH", "de-ch"},
{"de-DE-1996", "de-de"}, {"de-DE-1996", "de-de"}
] ]
@negative_examples [ @negative_examples [
{"en", "de"}, {"en", "de"},
{"de", "de-CH"}, {"de", "de-CH"},
{"de-Deva", "de-de"}, {"de-Deva", "de-de"},
{"de-Latn-DE", "de-de"}, {"de-Latn-DE", "de-de"}
] ]
test "with a language tag and a matching non-'*' language range" do test "with a language tag and a matching non-'*' language range" do
Enum.each @positive_examples, fn {language_tag, language_range} -> Enum.each(@positive_examples, fn {language_tag, language_range} ->
assert LangString.match_language?(language_tag, language_range), assert LangString.match_language?(language_tag, language_range),
"expected language range #{inspect language_range} to match language tag #{inspect language_tag}, but it didn't" "expected language range #{inspect(language_range)} to match language tag #{
end inspect(language_tag)
}, but it didn't"
end)
end end
test "with a language tag and a non-matching non-'*' language range" do test "with a language tag and a non-matching non-'*' language range" do
Enum.each @negative_examples, fn {language_tag, language_range} -> Enum.each(@negative_examples, fn {language_tag, language_range} ->
refute LangString.match_language?(language_tag, language_range), refute LangString.match_language?(language_tag, language_range),
"expected language range #{inspect language_range} to not match language tag #{inspect language_tag}, but it did" "expected language range #{inspect(language_range)} to not match language tag #{
end inspect(language_tag)
}, but it did"
end)
end end
test "with a language tag and '*' language range" do test "with a language tag and '*' language range" do
Enum.each @positive_examples ++ @negative_examples, fn {language_tag, _} -> Enum.each(@positive_examples ++ @negative_examples, fn {language_tag, _} ->
assert LangString.match_language?(language_tag, "*"), assert LangString.match_language?(language_tag, "*"),
~s[expected language range "*" to match language tag #{inspect language_tag}, but it didn't] ~s[expected language range "*" to match language tag #{inspect(language_tag)}, but it didn't]
end end)
end end
test "with the empty string as language tag" do test "with the empty string as language tag" do
@ -272,16 +297,22 @@ defmodule RDF.LangStringTest do
end end
test "with a RDF.LangString literal and a language range" do test "with a RDF.LangString literal and a language range" do
Enum.each @positive_examples, fn {language_tag, language_range} -> Enum.each(@positive_examples, fn {language_tag, language_range} ->
literal = LangString.new("foo", language: language_tag) literal = LangString.new("foo", language: language_tag)
assert LangString.match_language?(literal, language_range), assert LangString.match_language?(literal, language_range),
"expected language range #{inspect language_range} to match #{inspect literal}, but it didn't" "expected language range #{inspect(language_range)} to match #{inspect(literal)}, but it didn't"
end end)
Enum.each @negative_examples, fn {language_tag, language_range} ->
Enum.each(@negative_examples, fn {language_tag, language_range} ->
literal = LangString.new("foo", language: language_tag) literal = LangString.new("foo", language: language_tag)
refute LangString.match_language?(literal, language_range), refute LangString.match_language?(literal, language_range),
"expected language range #{inspect language_range} to not match #{inspect literal}, but it did" "expected language range #{inspect(language_range)} to not match #{
end inspect(literal)
}, but it did"
end)
refute LangString.match_language?(LangString.new("foo", language: ""), "de") refute LangString.match_language?(LangString.new("foo", language: ""), "de")
refute LangString.match_language?(LangString.new("foo", language: ""), "*") refute LangString.match_language?(LangString.new("foo", language: ""), "*")
refute LangString.match_language?(LangString.new("foo", language: nil), "de") refute LangString.match_language?(LangString.new("foo", language: nil), "de")
@ -3,16 +3,19 @@ defmodule RDF.DescriptionTest do
doctest RDF.Description doctest RDF.Description
describe "new" do describe "new" do
test "with a subject IRI" do test "with a subject IRI" do
assert description_of_subject(Description.new(~I<http://example.com/description/subject>), assert description_of_subject(
~I<http://example.com/description/subject>) Description.new(~I<http://example.com/description/subject>),
~I<http://example.com/description/subject>
)
end end
test "with a raw subject IRI string" do test "with a raw subject IRI string" do
assert description_of_subject(Description.new("http://example.com/description/subject"), assert description_of_subject(
~I<http://example.com/description/subject>) Description.new("http://example.com/description/subject"),
~I<http://example.com/description/subject>
)
end end
test "with an unresolved subject IRI term atom" do test "with an unresolved subject IRI term atom" do
@ -24,34 +27,38 @@ defmodule RDF.DescriptionTest do
end end
test "with a single initial triple" do test "with a single initial triple" do
desc = Description.new({EX.Subject, EX.predicate, EX.Object}) desc = Description.new({EX.Subject, EX.predicate(), EX.Object})
assert description_of_subject(desc, iri(EX.Subject)) assert description_of_subject(desc, iri(EX.Subject))
assert description_includes_predication(desc, {EX.predicate, iri(EX.Object)}) assert description_includes_predication(desc, {EX.predicate(), iri(EX.Object)})
desc = Description.new(EX.Subject, EX.predicate, 42) desc = Description.new(EX.Subject, EX.predicate(), 42)
assert description_of_subject(desc, iri(EX.Subject)) assert description_of_subject(desc, iri(EX.Subject))
assert description_includes_predication(desc, {EX.predicate, literal(42)}) assert description_includes_predication(desc, {EX.predicate(), literal(42)})
end end
test "with a list of initial triples" do test "with a list of initial triples" do
desc = Description.new([{EX.Subject, EX.predicate1, EX.Object1}, desc =
{EX.Subject, EX.predicate2, EX.Object2}]) Description.new([
assert description_of_subject(desc, iri(EX.Subject)) {EX.Subject, EX.predicate1(), EX.Object1},
assert description_includes_predication(desc, {EX.predicate1, iri(EX.Object1)}) {EX.Subject, EX.predicate2(), EX.Object2}
assert description_includes_predication(desc, {EX.predicate2, iri(EX.Object2)}) ])
desc = Description.new(EX.Subject, EX.predicate, [EX.Object, bnode(:foo), "bar"])
assert description_of_subject(desc, iri(EX.Subject)) assert description_of_subject(desc, iri(EX.Subject))
assert description_includes_predication(desc, {EX.predicate, iri(EX.Object)}) assert description_includes_predication(desc, {EX.predicate1(), iri(EX.Object1)})
assert description_includes_predication(desc, {EX.predicate, bnode(:foo)}) assert description_includes_predication(desc, {EX.predicate2(), iri(EX.Object2)})
assert description_includes_predication(desc, {EX.predicate, literal("bar")})
desc = Description.new(EX.Subject, EX.predicate(), [EX.Object, bnode(:foo), "bar"])
assert description_of_subject(desc, iri(EX.Subject))
assert description_includes_predication(desc, {EX.predicate(), iri(EX.Object)})
assert description_includes_predication(desc, {EX.predicate(), bnode(:foo)})
assert description_includes_predication(desc, {EX.predicate(), literal("bar")})
end end
test "from another description" do test "from another description" do
desc1 = Description.new({EX.Other, EX.predicate, EX.Object}) desc1 = Description.new({EX.Other, EX.predicate(), EX.Object})
desc2 = Description.new(EX.Subject, desc1) desc2 = Description.new(EX.Subject, desc1)
assert description_of_subject(desc2, iri(EX.Subject)) assert description_of_subject(desc2, iri(EX.Subject))
assert description_includes_predication(desc2, {EX.predicate, iri(EX.Object)}) assert description_includes_predication(desc2, {EX.predicate(), iri(EX.Object)})
end end
test "from a map with coercible RDF term" do test "from a map with coercible RDF term" do
@ -61,123 +68,150 @@ defmodule RDF.DescriptionTest do
end end
test "with another description as subject, it performs and add " do test "with another description as subject, it performs and add " do
desc = Description.new({EX.S, EX.p, EX.O}) desc = Description.new({EX.S, EX.p(), EX.O})
assert Description.new(desc, EX.p2, EX.O2) == assert Description.new(desc, EX.p2(), EX.O2) ==
Description.add(desc, EX.p2, EX.O2) Description.add(desc, EX.p2(), EX.O2)
assert Description.new(desc, EX.p, [EX.O1, EX.O2]) ==
Description.add(desc, EX.p, [EX.O1, EX.O2]) assert Description.new(desc, EX.p(), [EX.O1, EX.O2]) ==
Description.add(desc, EX.p(), [EX.O1, EX.O2])
end end
end end
describe "add" do describe "add" do
test "a predicate-object-pair of proper RDF terms" do test "a predicate-object-pair of proper RDF terms" do
assert Description.add(description(), EX.predicate, iri(EX.Object)) assert Description.add(description(), EX.predicate(), iri(EX.Object))
|> description_includes_predication({EX.predicate, iri(EX.Object)}) |> description_includes_predication({EX.predicate(), iri(EX.Object)})
assert Description.add(description(), {EX.predicate, iri(EX.Object)})
|> description_includes_predication({EX.predicate, iri(EX.Object)}) assert Description.add(description(), {EX.predicate(), iri(EX.Object)})
|> description_includes_predication({EX.predicate(), iri(EX.Object)})
end end
test "a predicate-object-pair of coercible RDF terms" do test "a predicate-object-pair of coercible RDF terms" do
assert Description.add(description(), assert Description.add(description(), "http://example.com/predicate", iri(EX.Object))
"http://example.com/predicate", iri(EX.Object)) |> description_includes_predication({EX.predicate(), iri(EX.Object)})
|> description_includes_predication({EX.predicate, iri(EX.Object)})
assert Description.add(description(), assert Description.add(
{"http://example.com/predicate", 42}) description(),
|> description_includes_predication({EX.predicate, literal(42)}) {"http://example.com/predicate", 42}
)
|> description_includes_predication({EX.predicate(), literal(42)})
assert Description.add(description(), assert Description.add(
{"http://example.com/predicate", true}) description(),
|> description_includes_predication({EX.predicate, literal(true)}) {"http://example.com/predicate", true}
)
|> description_includes_predication({EX.predicate(), literal(true)})
assert Description.add(description(), assert Description.add(
{"http://example.com/predicate", bnode(:foo)}) description(),
|> description_includes_predication({EX.predicate, bnode(:foo)}) {"http://example.com/predicate", bnode(:foo)}
)
|> description_includes_predication({EX.predicate(), bnode(:foo)})
end end
test "a proper triple" do test "a proper triple" do
assert Description.add(description(), assert Description.add(
{iri(EX.Subject), EX.predicate, iri(EX.Object)}) description(),
|> description_includes_predication({EX.predicate, iri(EX.Object)}) {iri(EX.Subject), EX.predicate(), iri(EX.Object)}
)
|> description_includes_predication({EX.predicate(), iri(EX.Object)})
assert Description.add(description(), assert Description.add(
{iri(EX.Subject), EX.predicate, literal(42)}) description(),
|> description_includes_predication({EX.predicate, literal(42)}) {iri(EX.Subject), EX.predicate(), literal(42)}
)
|> description_includes_predication({EX.predicate(), literal(42)})
assert Description.add(description(), assert Description.add(
{iri(EX.Subject), EX.predicate, bnode(:foo)}) description(),
|> description_includes_predication({EX.predicate, bnode(:foo)}) {iri(EX.Subject), EX.predicate(), bnode(:foo)}
)
|> description_includes_predication({EX.predicate(), bnode(:foo)})
end end
test "add ignores triples not about the subject of the Description struct" do test "add ignores triples not about the subject of the Description struct" do
assert empty_description( assert empty_description(
Description.add(description(), {EX.Other, EX.predicate, iri(EX.Object)})) Description.add(description(), {EX.Other, EX.predicate(), iri(EX.Object)})
)
end end
test "a list of predicate-object-pairs" do test "a list of predicate-object-pairs" do
desc = Description.add(description(), desc =
[{EX.predicate, EX.Object1}, {EX.predicate, EX.Object2}]) Description.add(
assert description_includes_predication(desc, {EX.predicate, iri(EX.Object1)}) description(),
assert description_includes_predication(desc, {EX.predicate, iri(EX.Object2)}) [{EX.predicate(), EX.Object1}, {EX.predicate(), EX.Object2}]
)
assert description_includes_predication(desc, {EX.predicate(), iri(EX.Object1)})
assert description_includes_predication(desc, {EX.predicate(), iri(EX.Object2)})
end end
test "a list of triples" do test "a list of triples" do
desc = Description.add(description(), [ desc =
{EX.Subject, EX.predicate1, EX.Object1}, Description.add(description(), [
{EX.Subject, EX.predicate2, EX.Object2} {EX.Subject, EX.predicate1(), EX.Object1},
]) {EX.Subject, EX.predicate2(), EX.Object2}
assert description_includes_predication(desc, {EX.predicate1, iri(EX.Object1)}) ])
assert description_includes_predication(desc, {EX.predicate2, iri(EX.Object2)})
assert description_includes_predication(desc, {EX.predicate1(), iri(EX.Object1)})
assert description_includes_predication(desc, {EX.predicate2(), iri(EX.Object2)})
end end
test "a list of mixed triples and predicate-object-pairs" do test "a list of mixed triples and predicate-object-pairs" do
desc = Description.add(description(), [ desc =
{EX.predicate, EX.Object1}, Description.add(description(), [
{EX.Subject, EX.predicate, EX.Object2}, {EX.predicate(), EX.Object1},
{EX.Other, EX.predicate, EX.Object3} {EX.Subject, EX.predicate(), EX.Object2},
]) {EX.Other, EX.predicate(), EX.Object3}
])
assert description_of_subject(desc, iri(EX.Subject)) assert description_of_subject(desc, iri(EX.Subject))
assert description_includes_predication(desc, {EX.predicate, iri(EX.Object1)}) assert description_includes_predication(desc, {EX.predicate(), iri(EX.Object1)})
assert description_includes_predication(desc, {EX.predicate, iri(EX.Object2)}) assert description_includes_predication(desc, {EX.predicate(), iri(EX.Object2)})
refute description_includes_predication(desc, {EX.predicate, iri(EX.Object3)}) refute description_includes_predication(desc, {EX.predicate(), iri(EX.Object3)})
end end
test "another description" do test "another description" do
desc = description([{EX.predicate1, EX.Object1}, {EX.predicate2, EX.Object2}]) desc =
|> Description.add(Description.new({EX.Other, EX.predicate3, EX.Object3})) description([{EX.predicate1(), EX.Object1}, {EX.predicate2(), EX.Object2}])
|> Description.add(Description.new({EX.Other, EX.predicate3(), EX.Object3}))
assert description_of_subject(desc, iri(EX.Subject)) assert description_of_subject(desc, iri(EX.Subject))
assert description_includes_predication(desc, {EX.predicate1, iri(EX.Object1)}) assert description_includes_predication(desc, {EX.predicate1(), iri(EX.Object1)})
assert description_includes_predication(desc, {EX.predicate2, iri(EX.Object2)}) assert description_includes_predication(desc, {EX.predicate2(), iri(EX.Object2)})
assert description_includes_predication(desc, {EX.predicate3, iri(EX.Object3)}) assert description_includes_predication(desc, {EX.predicate3(), iri(EX.Object3)})
desc = Description.add(desc, Description.new({EX.Other, EX.predicate1, EX.Object4})) desc = Description.add(desc, Description.new({EX.Other, EX.predicate1(), EX.Object4}))
assert description_includes_predication(desc, {EX.predicate1, iri(EX.Object1)}) assert description_includes_predication(desc, {EX.predicate1(), iri(EX.Object1)})
assert description_includes_predication(desc, {EX.predicate2, iri(EX.Object2)}) assert description_includes_predication(desc, {EX.predicate2(), iri(EX.Object2)})
assert description_includes_predication(desc, {EX.predicate3, iri(EX.Object3)}) assert description_includes_predication(desc, {EX.predicate3(), iri(EX.Object3)})
assert description_includes_predication(desc, {EX.predicate1, iri(EX.Object4)}) assert description_includes_predication(desc, {EX.predicate1(), iri(EX.Object4)})
end end
test "a map of predications with coercible RDF terms" do test "a map of predications with coercible RDF terms" do
desc = description([{EX.predicate1, EX.Object1}, {EX.predicate2, EX.Object2}]) desc =
|> Description.add(%{EX.predicate3 => EX.Object3}) description([{EX.predicate1(), EX.Object1}, {EX.predicate2(), EX.Object2}])
|> Description.add(%{EX.predicate3() => EX.Object3})
assert description_of_subject(desc, iri(EX.Subject)) assert description_of_subject(desc, iri(EX.Subject))
assert description_includes_predication(desc, {EX.predicate1, iri(EX.Object1)}) assert description_includes_predication(desc, {EX.predicate1(), iri(EX.Object1)})
assert description_includes_predication(desc, {EX.predicate2, iri(EX.Object2)}) assert description_includes_predication(desc, {EX.predicate2(), iri(EX.Object2)})
assert description_includes_predication(desc, {EX.predicate3, iri(EX.Object3)}) assert description_includes_predication(desc, {EX.predicate3(), iri(EX.Object3)})
desc =
Description.add(desc, %{
EX.predicate1() => EX.Object1,
EX.predicate2() => [EX.Object2, 42],
EX.predicate3() => [bnode(:foo)]
})
desc = Description.add(desc, %{EX.predicate1 => EX.Object1,
EX.predicate2 => [EX.Object2, 42],
EX.predicate3 => [bnode(:foo)]})
assert Description.count(desc) == 5 assert Description.count(desc) == 5
assert description_includes_predication(desc, {EX.predicate1, iri(EX.Object1)}) assert description_includes_predication(desc, {EX.predicate1(), iri(EX.Object1)})
assert description_includes_predication(desc, {EX.predicate2, iri(EX.Object2)}) assert description_includes_predication(desc, {EX.predicate2(), iri(EX.Object2)})
assert description_includes_predication(desc, {EX.predicate2, literal(42)}) assert description_includes_predication(desc, {EX.predicate2(), literal(42)})
assert description_includes_predication(desc, {EX.predicate3, iri(EX.Object3)}) assert description_includes_predication(desc, {EX.predicate3(), iri(EX.Object3)})
assert description_includes_predication(desc, {EX.predicate3, bnode(:foo)}) assert description_includes_predication(desc, {EX.predicate3(), bnode(:foo)})
end end
test "a map of predications with non-coercible RDF terms" do test "a map of predications with non-coercible RDF terms" do
@ -186,17 +220,17 @@ defmodule RDF.DescriptionTest do
end end
assert_raise RDF.Literal.InvalidError, fn -> assert_raise RDF.Literal.InvalidError, fn ->
Description.add(description(), %{EX.prop => self()}) Description.add(description(), %{EX.prop() => self()})
end end
end end
test "duplicates are ignored" do test "duplicates are ignored" do
desc = Description.add(description(), {EX.predicate, EX.Object}) desc = Description.add(description(), {EX.predicate(), EX.Object})
assert Description.add(desc, {EX.predicate, EX.Object}) == desc assert Description.add(desc, {EX.predicate(), EX.Object}) == desc
assert Description.add(desc, {EX.Subject, EX.predicate, EX.Object}) == desc assert Description.add(desc, {EX.Subject, EX.predicate(), EX.Object}) == desc
desc = Description.add(description(), {EX.predicate, 42}) desc = Description.add(description(), {EX.predicate(), 42})
assert Description.add(desc, {EX.predicate, literal(42)}) == desc assert Description.add(desc, {EX.predicate(), literal(42)}) == desc
end end
test "non-coercible Triple elements are causing an error" do test "non-coercible Triple elements are causing an error" do
@ -205,7 +239,7 @@ defmodule RDF.DescriptionTest do
end end
assert_raise RDF.Literal.InvalidError, fn -> assert_raise RDF.Literal.InvalidError, fn ->
Description.add(description(), {EX.prop, self()}) Description.add(description(), {EX.prop(), self()})
end end
end end
end end
@ -213,115 +247,164 @@ defmodule RDF.DescriptionTest do
describe "delete" do describe "delete" do
setup do setup do
{:ok, {:ok,
empty_description: Description.new(EX.S), empty_description: Description.new(EX.S),
description1: Description.new(EX.S, EX.p, EX.O), description1: Description.new(EX.S, EX.p(), EX.O),
description2: Description.new(EX.S, EX.p, [EX.O1, EX.O2]), description2: Description.new(EX.S, EX.p(), [EX.O1, EX.O2]),
description3: Description.new(EX.S, [ description3:
{EX.p1, [EX.O1, EX.O2]}, Description.new(EX.S, [
{EX.p2, EX.O3}, {EX.p1(), [EX.O1, EX.O2]},
{EX.p3, [~B<foo>, ~L"bar"]}, {EX.p2(), EX.O3},
]) {EX.p3(), [~B<foo>, ~L"bar"]}
} ])}
end end
test "a single statement as a predicate object", test "a single statement as a predicate object",
%{empty_description: empty_description, description1: description1, description2: description2} do %{
assert Description.delete(empty_description, EX.p, EX.O) == empty_description empty_description: empty_description,
assert Description.delete(description1, EX.p, EX.O) == empty_description description1: description1,
assert Description.delete(description2, EX.p, EX.O1) == Description.new(EX.S, EX.p, EX.O2) description2: description2
} do
assert Description.delete(empty_description, EX.p(), EX.O) == empty_description
assert Description.delete(description1, EX.p(), EX.O) == empty_description
assert Description.delete(description2, EX.p(), EX.O1) ==
Description.new(EX.S, EX.p(), EX.O2)
end end
test "a single statement as a predicate-object tuple", test "a single statement as a predicate-object tuple",
%{empty_description: empty_description, description1: description1, description2: description2} do %{
assert Description.delete(empty_description, {EX.p, EX.O}) == empty_description empty_description: empty_description,
assert Description.delete(description1, {EX.p, EX.O}) == empty_description description1: description1,
assert Description.delete(description2, {EX.p, EX.O2}) == Description.new(EX.S, EX.p, EX.O1) description2: description2
} do
assert Description.delete(empty_description, {EX.p(), EX.O}) == empty_description
assert Description.delete(description1, {EX.p(), EX.O}) == empty_description
assert Description.delete(description2, {EX.p(), EX.O2}) ==
Description.new(EX.S, EX.p(), EX.O1)
end end
test "a single statement as a subject-predicate-object tuple and the proper description subject", test "a single statement as a subject-predicate-object tuple and the proper description subject",
%{empty_description: empty_description, description1: description1, description2: description2} do %{
assert Description.delete(empty_description, {EX.S, EX.p, EX.O}) == empty_description empty_description: empty_description,
assert Description.delete(description1, {EX.S, EX.p, EX.O}) == empty_description description1: description1,
assert Description.delete(description2, {EX.S, EX.p, EX.O2}) == Description.new(EX.S, EX.p, EX.O1) description2: description2
} do
assert Description.delete(empty_description, {EX.S, EX.p(), EX.O}) == empty_description
assert Description.delete(description1, {EX.S, EX.p(), EX.O}) == empty_description
assert Description.delete(description2, {EX.S, EX.p(), EX.O2}) ==
Description.new(EX.S, EX.p(), EX.O1)
end end
test "a single statement as a subject-predicate-object tuple and another description subject", test "a single statement as a subject-predicate-object tuple and another description subject",
%{empty_description: empty_description, description1: description1, description2: description2} do %{
assert Description.delete(empty_description, {EX.Other, EX.p, EX.O}) == empty_description empty_description: empty_description,
assert Description.delete(description1, {EX.Other, EX.p, EX.O}) == description1 description1: description1,
assert Description.delete(description2, {EX.Other, EX.p, EX.O2}) == description2 description2: description2
} do
assert Description.delete(empty_description, {EX.Other, EX.p(), EX.O}) == empty_description
assert Description.delete(description1, {EX.Other, EX.p(), EX.O}) == description1
assert Description.delete(description2, {EX.Other, EX.p(), EX.O2}) == description2
end end
test "multiple statements via predicate-objects tuple", test "multiple statements via predicate-objects tuple",
%{empty_description: empty_description, description1: description1, description2: description2} do %{
assert Description.delete(empty_description, {EX.p, [EX.O1, EX.O2]}) == empty_description empty_description: empty_description,
assert Description.delete(description1, {EX.p, [EX.O, EX.O2]}) == empty_description description1: description1,
assert Description.delete(description2, {EX.p, [EX.O1, EX.O2]}) == empty_description description2: description2
} do
assert Description.delete(empty_description, {EX.p(), [EX.O1, EX.O2]}) == empty_description
assert Description.delete(description1, {EX.p(), [EX.O, EX.O2]}) == empty_description
assert Description.delete(description2, {EX.p(), [EX.O1, EX.O2]}) == empty_description
end end
test "multiple statements with a list", test "multiple statements with a list",
%{empty_description: empty_description, description3: description3} do %{empty_description: empty_description, description3: description3} do
assert Description.delete(empty_description, [{EX.p, [EX.O1, EX.O2]}]) == empty_description assert Description.delete(empty_description, [{EX.p(), [EX.O1, EX.O2]}]) ==
empty_description
assert Description.delete(description3, [ assert Description.delete(description3, [
{EX.p1, EX.O1}, {EX.p1(), EX.O1},
{EX.p2, [EX.O2, EX.O3]}, {EX.p2(), [EX.O2, EX.O3]},
{EX.S, EX.p3, [~B<foo>, ~L"bar"]}, {EX.S, EX.p3(), [~B<foo>, ~L"bar"]}
]) == Description.new(EX.S, EX.p1, EX.O2) ]) == Description.new(EX.S, EX.p1(), EX.O2)
end end
test "multiple statements with a map of predications", test "multiple statements with a map of predications",
%{empty_description: empty_description, description3: description3} do %{empty_description: empty_description, description3: description3} do
assert Description.delete(empty_description, %{EX.p => EX.O1}) == empty_description assert Description.delete(empty_description, %{EX.p() => EX.O1}) == empty_description
assert Description.delete(description3, %{ assert Description.delete(description3, %{
EX.p1 => EX.O1, EX.p1() => EX.O1,
EX.p2 => [EX.O2, EX.O3], EX.p2() => [EX.O2, EX.O3],
EX.p3 => [~B<foo>, ~L"bar"], EX.p3() => [~B<foo>, ~L"bar"]
}) == Description.new(EX.S, EX.p1, EX.O2) }) == Description.new(EX.S, EX.p1(), EX.O2)
end end
test "multiple statements with another description", test "multiple statements with another description",
%{empty_description: empty_description, description1: description1, description3: description3} do %{
empty_description: empty_description,
description1: description1,
description3: description3
} do
assert Description.delete(empty_description, description1) == empty_description assert Description.delete(empty_description, description1) == empty_description
assert Description.delete(description3, Description.new(EX.S, %{
EX.p1 => EX.O1, assert Description.delete(
EX.p2 => [EX.O2, EX.O3], description3,
EX.p3 => [~B<foo>, ~L"bar"], Description.new(EX.S, %{
})) == Description.new(EX.S, EX.p1, EX.O2) EX.p1() => EX.O1,
EX.p2() => [EX.O2, EX.O3],
EX.p3() => [~B<foo>, ~L"bar"]
})
) == Description.new(EX.S, EX.p1(), EX.O2)
end end
end end
describe "delete_predicates" do describe "delete_predicates" do
setup do setup do
{:ok, {:ok,
empty_description: Description.new(EX.S), empty_description: Description.new(EX.S),
description1: Description.new(EX.S, EX.p, [EX.O1, EX.O2]), description1: Description.new(EX.S, EX.p(), [EX.O1, EX.O2]),
description2: Description.new(EX.S, [ description2:
{EX.P1, [EX.O1, EX.O2]}, Description.new(EX.S, [
{EX.p2, [~B<foo>, ~L"bar"]}, {EX.P1, [EX.O1, EX.O2]},
]) {EX.p2(), [~B<foo>, ~L"bar"]}
} ])}
end end
test "a single property", test "a single property",
%{empty_description: empty_description, description1: description1, description2: description2} do %{
assert Description.delete_predicates(description1, EX.p) == empty_description empty_description: empty_description,
description1: description1,
description2: description2
} do
assert Description.delete_predicates(description1, EX.p()) == empty_description
assert Description.delete_predicates(description2, EX.P1) == assert Description.delete_predicates(description2, EX.P1) ==
Description.new(EX.S, EX.p2, [~B<foo>, ~L"bar"]) Description.new(EX.S, EX.p2(), [~B<foo>, ~L"bar"])
end end
test "a list of properties", test "a list of properties",
%{empty_description: empty_description, description1: description1, description2: description2} do %{
assert Description.delete_predicates(description1, [EX.p]) == empty_description empty_description: empty_description,
assert Description.delete_predicates(description2, [EX.P1, EX.p2, EX.p3]) == empty_description description1: description1,
description2: description2
} do
assert Description.delete_predicates(description1, [EX.p()]) == empty_description
assert Description.delete_predicates(description2, [EX.P1, EX.p2(), EX.p3()]) ==
empty_description
end end
end end
describe "update/4" do describe "update/4" do
test "list values returned from the update function become new coerced objects of the predicate" do test "list values returned from the update function become new coerced objects of the predicate" do
assert Description.new(EX.S, EX.P, [EX.O1, EX.O2]) assert Description.new(EX.S, EX.P, [EX.O1, EX.O2])
|> Description.update(EX.P, |> Description.update(
fn [_object | other] -> [EX.O3 | other] end) == EX.P,
fn [_object | other] -> [EX.O3 | other] end
) ==
Description.new(EX.S, EX.P, [EX.O3, EX.O2]) Description.new(EX.S, EX.P, [EX.O3, EX.O2])
end end
@ -332,18 +415,22 @@ defmodule RDF.DescriptionTest do
end end
test "returning an empty list or nil from the update function causes a removal of the predications" do test "returning an empty list or nil from the update function causes a removal of the predications" do
description = EX.S description =
|> EX.p(EX.O1, EX.O2) EX.S
|> EX.p(EX.O1, EX.O2)
assert description assert description
|> Description.update(EX.p, fn _ -> [] end) == |> Description.update(EX.p(), fn _ -> [] end) ==
Description.new(EX.S, {EX.p, []}) Description.new(EX.S, {EX.p(), []})
assert description assert description
|> Description.update(EX.p, fn _ -> nil end) == |> Description.update(EX.p(), fn _ -> nil end) ==
Description.new(EX.S, {EX.p, []}) Description.new(EX.S, {EX.p(), []})
end end
test "when the property is not present the initial object value is added for the predicate and the update function not called" do test "when the property is not present the initial object value is added for the predicate and the update function not called" do
fun = fn _ -> raise "should not be called" end fun = fn _ -> raise "should not be called" end
assert Description.new(EX.S) assert Description.new(EX.S)
|> Description.update(EX.P, EX.O, fun) == |> Description.update(EX.P, EX.O, fun) ==
Description.new(EX.S, EX.P, EX.O) Description.new(EX.S, EX.P, EX.O)
@ -357,143 +444,174 @@ defmodule RDF.DescriptionTest do
test "pop" do test "pop" do
assert Description.pop(Description.new(EX.S)) == {nil, Description.new(EX.S)} assert Description.pop(Description.new(EX.S)) == {nil, Description.new(EX.S)}
{triple, desc} = Description.new({EX.S, EX.p, EX.O}) |> Description.pop {triple, desc} = Description.new({EX.S, EX.p(), EX.O}) |> Description.pop()
assert {iri(EX.S), iri(EX.p), iri(EX.O)} == triple assert {iri(EX.S), iri(EX.p()), iri(EX.O)} == triple
assert Enum.count(desc.predications) == 0 assert Enum.count(desc.predications) == 0
{{subject, predicate, _}, desc} = {{subject, predicate, _}, desc} =
Description.new([{EX.S, EX.p, EX.O1}, {EX.S, EX.p, EX.O2}]) Description.new([{EX.S, EX.p(), EX.O1}, {EX.S, EX.p(), EX.O2}])
|> Description.pop |> Description.pop()
assert {subject, predicate} == {iri(EX.S), iri(EX.p)}
assert {subject, predicate} == {iri(EX.S), iri(EX.p())}
assert Enum.count(desc.predications) == 1 assert Enum.count(desc.predications) == 1
{{subject, _, _}, desc} = {{subject, _, _}, desc} =
Description.new([{EX.S, EX.p1, EX.O1}, {EX.S, EX.p2, EX.O2}]) Description.new([{EX.S, EX.p1(), EX.O1}, {EX.S, EX.p2(), EX.O2}])
|> Description.pop |> Description.pop()
assert subject == iri(EX.S) assert subject == iri(EX.S)
assert Enum.count(desc.predications) == 1 assert Enum.count(desc.predications) == 1
end end
test "values/1" do test "values/1" do
assert Description.new(EX.s) |> Description.values() == %{} assert Description.new(EX.s()) |> Description.values() == %{}
assert Description.new({EX.s, EX.p, ~L"Foo"}) |> Description.values() ==
%{RDF.Term.value(EX.p) => ["Foo"]} assert Description.new({EX.s(), EX.p(), ~L"Foo"}) |> Description.values() ==
%{RDF.Term.value(EX.p()) => ["Foo"]}
end end
test "values/2" do test "values/2" do
mapping = fn mapping = fn
{:predicate, predicate} -> {:predicate, predicate} ->
predicate |> to_string() |> String.split("/") |> List.last() |> String.to_atom() predicate |> to_string() |> String.split("/") |> List.last() |> String.to_atom()
{_, term} -> {_, term} ->
RDF.Term.value(term) RDF.Term.value(term)
end end
assert Description.new(EX.s) |> Description.values(mapping) == %{} assert Description.new(EX.s()) |> Description.values(mapping) == %{}
assert Description.new({EX.s, EX.p, ~L"Foo"}) |> Description.values(mapping) ==
assert Description.new({EX.s(), EX.p(), ~L"Foo"}) |> Description.values(mapping) ==
%{p: ["Foo"]} %{p: ["Foo"]}
end end
describe "take/2" do describe "take/2" do
test "with a non-empty property list" do test "with a non-empty property list" do
assert Description.new([{EX.S, EX.p1, EX.O1}, {EX.S, EX.p2, EX.O2}]) assert Description.new([{EX.S, EX.p1(), EX.O1}, {EX.S, EX.p2(), EX.O2}])
|> Description.take([EX.p2, EX.p3]) == |> Description.take([EX.p2(), EX.p3()]) ==
Description.new({EX.S, EX.p2, EX.O2}) Description.new({EX.S, EX.p2(), EX.O2})
end end
test "with an empty property list" do test "with an empty property list" do
assert Description.new([{EX.S, EX.p1, EX.O1}, {EX.S, EX.p2, EX.O2}]) assert Description.new([{EX.S, EX.p1(), EX.O1}, {EX.S, EX.p2(), EX.O2}])
|> Description.take([]) == Description.new(EX.S) |> Description.take([]) == Description.new(EX.S)
end end
test "with nil" do test "with nil" do
assert Description.new([{EX.S, EX.p1, EX.O1}, {EX.S, EX.p2, EX.O2}]) assert Description.new([{EX.S, EX.p1(), EX.O1}, {EX.S, EX.p2(), EX.O2}])
|> Description.take(nil) == |> Description.take(nil) ==
Description.new([{EX.S, EX.p1, EX.O1}, {EX.S, EX.p2, EX.O2}]) Description.new([{EX.S, EX.p1(), EX.O1}, {EX.S, EX.p2(), EX.O2}])
end end
end end
test "equal/2" do test "equal/2" do
assert Description.new({EX.S, EX.p, EX.O}) |> Description.equal?(Description.new({EX.S, EX.p, EX.O})) assert Description.new({EX.S, EX.p(), EX.O})
refute Description.new({EX.S, EX.p, EX.O}) |> Description.equal?(Description.new({EX.S, EX.p, EX.O2})) |> Description.equal?(Description.new({EX.S, EX.p(), EX.O}))
refute Description.new({EX.S, EX.p(), EX.O})
|> Description.equal?(Description.new({EX.S, EX.p(), EX.O2}))
end end
describe "Enumerable protocol" do describe "Enumerable protocol" do
test "Enum.count" do test "Enum.count" do
assert Enum.count(Description.new EX.foo) == 0 assert Enum.count(Description.new(EX.foo())) == 0
assert Enum.count(Description.new {EX.S, EX.p, EX.O}) == 1 assert Enum.count(Description.new({EX.S, EX.p(), EX.O})) == 1
assert Enum.count(Description.new [{EX.S, EX.p, EX.O1}, {EX.S, EX.p, EX.O2}]) == 2 assert Enum.count(Description.new([{EX.S, EX.p(), EX.O1}, {EX.S, EX.p(), EX.O2}])) == 2
end end
test "Enum.member?" do test "Enum.member?" do
refute Enum.member?(Description.new(EX.S), {iri(EX.S), EX.p, iri(EX.O)}) refute Enum.member?(Description.new(EX.S), {iri(EX.S), EX.p(), iri(EX.O)})
assert Enum.member?(Description.new({EX.S, EX.p, EX.O}), {EX.S, EX.p, EX.O}) assert Enum.member?(Description.new({EX.S, EX.p(), EX.O}), {EX.S, EX.p(), EX.O})
desc = Description.new([ desc =
{EX.Subject, EX.predicate1, EX.Object1}, Description.new([
{EX.Subject, EX.predicate2, EX.Object2}, {EX.Subject, EX.predicate1(), EX.Object1},
{EX.predicate2, EX.Object3}]) {EX.Subject, EX.predicate2(), EX.Object2},
assert Enum.member?(desc, {EX.Subject, EX.predicate1, EX.Object1}) {EX.predicate2(), EX.Object3}
assert Enum.member?(desc, {EX.Subject, EX.predicate2, EX.Object2}) ])
assert Enum.member?(desc, {EX.Subject, EX.predicate2, EX.Object3})
refute Enum.member?(desc, {EX.Subject, EX.predicate1, EX.Object2}) assert Enum.member?(desc, {EX.Subject, EX.predicate1(), EX.Object1})
assert Enum.member?(desc, {EX.Subject, EX.predicate2(), EX.Object2})
assert Enum.member?(desc, {EX.Subject, EX.predicate2(), EX.Object3})
refute Enum.member?(desc, {EX.Subject, EX.predicate1(), EX.Object2})
end end
test "Enum.reduce" do test "Enum.reduce" do
desc = Description.new([ desc =
{EX.Subject, EX.predicate1, EX.Object1}, Description.new([
{EX.Subject, EX.predicate2, EX.Object2}, {EX.Subject, EX.predicate1(), EX.Object1},
{EX.predicate2, EX.Object3}]) {EX.Subject, EX.predicate2(), EX.Object2},
assert desc == Enum.reduce(desc, description(), {EX.predicate2(), EX.Object3}
fn(triple, acc) -> acc |> Description.add(triple) end) ])
assert desc ==
Enum.reduce(desc, description(), fn triple, acc ->
acc |> Description.add(triple)
end)
end end
end end
describe "Collectable protocol" do describe "Collectable protocol" do
test "with a map" do test "with a map" do
map = %{ map = %{
EX.predicate1 => EX.Object1, EX.predicate1() => EX.Object1,
EX.predicate2 => EX.Object2 EX.predicate2() => EX.Object2
} }
assert Enum.into(map, Description.new(EX.Subject)) == Description.new(EX.Subject, map) assert Enum.into(map, Description.new(EX.Subject)) == Description.new(EX.Subject, map)
end end
test "with a list of triples" do test "with a list of triples" do
triples = [ triples = [
{EX.Subject, EX.predicate1, EX.Object1}, {EX.Subject, EX.predicate1(), EX.Object1},
{EX.Subject, EX.predicate2, EX.Object2} {EX.Subject, EX.predicate2(), EX.Object2}
] ]
assert Enum.into(triples, Description.new(EX.Subject)) == Description.new(triples) assert Enum.into(triples, Description.new(EX.Subject)) == Description.new(triples)
end end
test "with a list of predicate-object pairs" do test "with a list of predicate-object pairs" do
pairs = [ pairs = [
{EX.predicate1, EX.Object1}, {EX.predicate1(), EX.Object1},
{EX.predicate2, EX.Object2} {EX.predicate2(), EX.Object2}
] ]
assert Enum.into(pairs, Description.new(EX.Subject)) == Description.new(EX.Subject, pairs) assert Enum.into(pairs, Description.new(EX.Subject)) == Description.new(EX.Subject, pairs)
end end
test "with a list of lists" do test "with a list of lists" do
lists = [ lists = [
[EX.Subject, EX.predicate1, EX.Object1], [EX.Subject, EX.predicate1(), EX.Object1],
[EX.Subject, EX.predicate2, EX.Object2] [EX.Subject, EX.predicate2(), EX.Object2]
] ]
assert Enum.into(lists, Description.new(EX.Subject)) == assert Enum.into(lists, Description.new(EX.Subject)) ==
Description.new(Enum.map(lists, &List.to_tuple/1)) Description.new(Enum.map(lists, &List.to_tuple/1))
end end
end end
describe "Access behaviour" do describe "Access behaviour" do
test "access with the [] operator" do test "access with the [] operator" do
assert Description.new(EX.Subject)[EX.predicate] == nil assert Description.new(EX.Subject)[EX.predicate()] == nil
assert Description.new(EX.Subject, EX.predicate, EX.Object)[EX.predicate] == [iri(EX.Object)]
assert Description.new(EX.Subject, EX.Predicate, EX.Object)[EX.Predicate] == [iri(EX.Object)] assert Description.new(EX.Subject, EX.predicate(), EX.Object)[EX.predicate()] == [
assert Description.new(EX.Subject, EX.predicate, EX.Object)["http://example.com/predicate"] == [iri(EX.Object)] iri(EX.Object)
assert Description.new([{EX.Subject, EX.predicate1, EX.Object1}, ]
{EX.Subject, EX.predicate1, EX.Object2},
{EX.Subject, EX.predicate2, EX.Object3}])[EX.predicate1] == assert Description.new(EX.Subject, EX.Predicate, EX.Object)[EX.Predicate] == [
[iri(EX.Object1), iri(EX.Object2)] iri(EX.Object)
]
assert Description.new(EX.Subject, EX.predicate(), EX.Object)[
"http://example.com/predicate"
] == [iri(EX.Object)]
assert Description.new([
{EX.Subject, EX.predicate1(), EX.Object1},
{EX.Subject, EX.predicate1(), EX.Object2},
{EX.Subject, EX.predicate2(), EX.Object3}
])[EX.predicate1()] ==
[iri(EX.Object1), iri(EX.Object2)]
end end
end end
end end
@ -8,12 +8,16 @@ defmodule RDF.DiffTest do
test "new" do test "new" do
assert Diff.new() == assert Diff.new() ==
%Diff{additions: Graph.new(), deletions: Graph.new()} %Diff{additions: Graph.new(), deletions: Graph.new()}
assert Diff.new(additions: [], deletions: []) == assert Diff.new(additions: [], deletions: []) ==
%Diff{additions: Graph.new(), deletions: Graph.new()} %Diff{additions: Graph.new(), deletions: Graph.new()}
assert Diff.new(additions: Graph.new(), deletions: Graph.new) ==
assert Diff.new(additions: Graph.new(), deletions: Graph.new()) ==
%Diff{additions: Graph.new(), deletions: Graph.new()} %Diff{additions: Graph.new(), deletions: Graph.new()}
description = Description.new({EX.S, EX.p, EX.O1})
graph = Graph.new({EX.S, EX.p, EX.O2}) description = Description.new({EX.S, EX.p(), EX.O1})
graph = Graph.new({EX.S, EX.p(), EX.O2})
assert Diff.new(additions: description, deletions: graph) == assert Diff.new(additions: description, deletions: graph) ==
%Diff{additions: Graph.new(description), deletions: graph} %Diff{additions: Graph.new(description), deletions: graph}
end end
@ -26,43 +30,54 @@ defmodule RDF.DiffTest do
end end
test "with two descriptions with different subjects" do test "with two descriptions with different subjects" do
description1 = Description.new({EX.S1, EX.p, EX.O}) description1 = Description.new({EX.S1, EX.p(), EX.O})
description2 = Description.new({EX.S2, EX.p, EX.O}) description2 = Description.new({EX.S2, EX.p(), EX.O})
assert Diff.diff(description1, description2) == assert Diff.diff(description1, description2) ==
Diff.new(additions: Graph.new(description2), Diff.new(
deletions: Graph.new(description1)) additions: Graph.new(description2),
deletions: Graph.new(description1)
)
end end
test "with two descriptions when the second description has additional statements" do test "with two descriptions when the second description has additional statements" do
description1 = Description.new({EX.S, EX.p, EX.O}) description1 = Description.new({EX.S, EX.p(), EX.O})
description2 = description2 =
description1 description1
|> EX.p(EX.O2) |> EX.p(EX.O2)
|> EX.p2(EX.O) |> EX.p2(EX.O)
assert Diff.diff(description1, description2) == assert Diff.diff(description1, description2) ==
Diff.new(additions: Graph.new( Diff.new(
EX.S additions:
|> EX.p(EX.O2) Graph.new(
|> EX.p2(EX.O) EX.S
), |> EX.p(EX.O2)
deletions: Graph.new()) |> EX.p2(EX.O)
),
deletions: Graph.new()
)
end end
test "with two descriptions when the first description has additional statements" do test "with two descriptions when the first description has additional statements" do
description1 = Description.new({EX.S, EX.p, EX.O}) description1 = Description.new({EX.S, EX.p(), EX.O})
description2 = description2 =
description1 description1
|> EX.p(EX.O2) |> EX.p(EX.O2)
|> EX.p2(EX.O) |> EX.p2(EX.O)
assert Diff.diff(description2, description1) == assert Diff.diff(description2, description1) ==
Diff.new(additions: Graph.new, Diff.new(
deletions: Graph.new( additions: Graph.new(),
EX.S deletions:
|> EX.p(EX.O2) Graph.new(
|> EX.p2(EX.O) EX.S
)) |> EX.p(EX.O2)
|> EX.p2(EX.O)
)
)
end end
end end
@ -71,6 +86,7 @@ defmodule RDF.DiffTest do
EX.S EX.S
|> EX.p(EX.O1, EX.O2) |> EX.p(EX.O1, EX.O2)
|> EX.p2(EX.O) |> EX.p2(EX.O)
description2 = description2 =
EX.S EX.S
|> EX.p(EX.O1, EX.O3) |> EX.p(EX.O1, EX.O3)
@ -78,17 +94,19 @@ defmodule RDF.DiffTest do
assert Diff.diff(description1, description2) == assert Diff.diff(description1, description2) ==
Diff.new( Diff.new(
additions: Graph.new( additions:
EX.S Graph.new(
|> EX.p(EX.O3) EX.S
|> EX.p3(EX.O) |> EX.p(EX.O3)
|> EX.p3(EX.O)
), ),
deletions: Graph.new( deletions:
EX.S Graph.new(
|> EX.p(EX.O2) EX.S
|> EX.p2(EX.O) |> EX.p(EX.O2)
)) |> EX.p2(EX.O)
)
)
end end
test "with one description and a graph" do test "with one description and a graph" do
@ -96,112 +114,141 @@ defmodule RDF.DiffTest do
EX.S1 EX.S1
|> EX.p(EX.O1, EX.O2) |> EX.p(EX.O1, EX.O2)
|> EX.p2(EX.O) |> EX.p2(EX.O)
graph = Graph.new([
EX.S1 graph =
|> EX.p(EX.O2, EX.O3) Graph.new([
|> EX.p3(EX.O), EX.S1
EX.S3 |> EX.p(EX.O2, EX.O3)
|> EX.p(EX.O) |> EX.p3(EX.O),
]) EX.S3
|> EX.p(EX.O)
])
assert Diff.diff(description, graph) == assert Diff.diff(description, graph) ==
Diff.new( Diff.new(
additions: Graph.new([ additions:
EX.S1 Graph.new([
|> EX.p(EX.O3) EX.S1
|> EX.p3(EX.O), |> EX.p(EX.O3)
EX.S3 |> EX.p3(EX.O),
|> EX.p(EX.O) EX.S3
]), |> EX.p(EX.O)
deletions: Graph.new([ ]),
EX.S1 deletions:
|> EX.p(EX.O1) Graph.new([
|> EX.p2(EX.O), EX.S1
])) |> EX.p(EX.O1)
|> EX.p2(EX.O)
])
)
assert Diff.diff(graph, description) == assert Diff.diff(graph, description) ==
Diff.new( Diff.new(
additions: Graph.new([ additions:
EX.S1 Graph.new([
|> EX.p(EX.O1) EX.S1
|> EX.p2(EX.O), |> EX.p(EX.O1)
]), |> EX.p2(EX.O)
deletions: Graph.new([ ]),
EX.S1 deletions:
|> EX.p(EX.O3) Graph.new([
|> EX.p3(EX.O), EX.S1
EX.S3 |> EX.p(EX.O3)
|> EX.p(EX.O) |> EX.p3(EX.O),
]) EX.S3
|> EX.p(EX.O)
])
) )
disjoint_description = disjoint_description =
EX.S EX.S
|> EX.p(EX.O1, EX.O2) |> EX.p(EX.O1, EX.O2)
|> EX.p2(EX.O) |> EX.p2(EX.O)
assert Diff.diff(disjoint_description, graph) == assert Diff.diff(disjoint_description, graph) ==
Diff.new( Diff.new(
additions: graph, additions: graph,
deletions: Graph.new(disjoint_description)) deletions: Graph.new(disjoint_description)
)
assert Diff.diff(graph, disjoint_description) == assert Diff.diff(graph, disjoint_description) ==
Diff.new( Diff.new(
additions: Graph.new(disjoint_description), additions: Graph.new(disjoint_description),
deletions: graph) deletions: graph
)
end end
test "with two graphs with additions and deletions" do test "with two graphs with additions and deletions" do
graph1 = Graph.new([ graph1 =
EX.S1 Graph.new([
|> EX.p(EX.O1, EX.O2) EX.S1
|> EX.p2(EX.O), |> EX.p(EX.O1, EX.O2)
EX.S2 |> EX.p2(EX.O),
|> EX.p(EX.O) EX.S2
]) |> EX.p(EX.O)
graph2 = Graph.new([ ])
EX.S1
|> EX.p(EX.O2, EX.O3) graph2 =
|> EX.p3(EX.O), Graph.new([
EX.S3 EX.S1
|> EX.p(EX.O) |> EX.p(EX.O2, EX.O3)
]) |> EX.p3(EX.O),
EX.S3
|> EX.p(EX.O)
])
assert Diff.diff(graph1, graph2) == assert Diff.diff(graph1, graph2) ==
Diff.new( Diff.new(
additions: Graph.new([ additions:
EX.S1 Graph.new([
|> EX.p(EX.O3) EX.S1
|> EX.p3(EX.O), |> EX.p(EX.O3)
EX.S3 |> EX.p3(EX.O),
|> EX.p(EX.O) EX.S3
]), |> EX.p(EX.O)
deletions: Graph.new([ ]),
EX.S1 deletions:
|> EX.p(EX.O1) Graph.new([
|> EX.p2(EX.O), EX.S1
EX.S2 |> EX.p(EX.O1)
|> EX.p(EX.O) |> EX.p2(EX.O),
])) EX.S2
|> EX.p(EX.O)
])
)
end end
test "merge/2" do test "merge/2" do
assert Diff.merge( assert Diff.merge(
Diff.new(additions: Graph.new({EX.S, EX.p, EX.O1}), Diff.new(
deletions: Graph.new({EX.S1, EX.p, EX.O})), additions: Graph.new({EX.S, EX.p(), EX.O1}),
Diff.new(additions: Graph.new({EX.S, EX.p, EX.O2}), deletions: Graph.new({EX.S1, EX.p(), EX.O})
deletions: Graph.new({EX.S2, EX.p, EX.O})) ),
Diff.new(
additions: Graph.new({EX.S, EX.p(), EX.O2}),
deletions: Graph.new({EX.S2, EX.p(), EX.O})
)
) == ) ==
Diff.new( Diff.new(
additions: Graph.new({EX.S, EX.p, [EX.O1, EX.O2]}), additions: Graph.new({EX.S, EX.p(), [EX.O1, EX.O2]}),
deletions: Graph.new([ deletions:
{EX.S1, EX.p, EX.O}, Graph.new([
{EX.S2, EX.p, EX.O} {EX.S1, EX.p(), EX.O},
]) {EX.S2, EX.p(), EX.O}
])
) )
end end
test "empty?/1" do test "empty?/1" do
assert Diff.empty?(Diff.new()) == true assert Diff.empty?(Diff.new()) == true
assert Diff.empty?(Diff.new(additions: EX.p(EX.S, EX.O),
deletions: EX.p(EX.S, EX.O))) == false assert Diff.empty?(
Diff.new(
additions: EX.p(EX.S, EX.O),
deletions: EX.p(EX.S, EX.O)
)
) == false
assert Diff.empty?(Diff.new(additions: EX.p(EX.S, EX.O))) == false assert Diff.empty?(Diff.new(additions: EX.p(EX.S, EX.O))) == false
assert Diff.empty?(Diff.new(deletions: EX.p(EX.S, EX.O))) == false assert Diff.empty?(Diff.new(deletions: EX.p(EX.S, EX.O))) == false
end end
@ -209,27 +256,32 @@ defmodule RDF.DiffTest do
describe "apply/2" do describe "apply/2" do
test "on a graph" do test "on a graph" do
assert Diff.new( assert Diff.new(
additions: Graph.new([ additions:
Graph.new([
EX.S1 EX.S1
|> EX.p(EX.O3) |> EX.p(EX.O3)
|> EX.p3(EX.O), |> EX.p3(EX.O),
EX.S3 EX.S3
|> EX.p(EX.O) |> EX.p(EX.O)
]), ]),
deletions: Graph.new([ deletions:
Graph.new([
EX.S1 EX.S1
|> EX.p(EX.O1) |> EX.p(EX.O1)
|> EX.p2(EX.O), |> EX.p2(EX.O),
EX.S2 EX.S2
|> EX.p(EX.O) |> EX.p(EX.O)
])) ])
|> Diff.apply(Graph.new([ )
EX.S1 |> Diff.apply(
|> EX.p(EX.O1, EX.O2) Graph.new([
|> EX.p2(EX.O), EX.S1
EX.S2 |> EX.p(EX.O1, EX.O2)
|> EX.p(EX.O) |> EX.p2(EX.O),
])) == EX.S2
|> EX.p(EX.O)
])
) ==
Graph.new([ Graph.new([
EX.S1 EX.S1
|> EX.p(EX.O2, EX.O3) |> EX.p(EX.O2, EX.O3)
@ -241,23 +293,26 @@ defmodule RDF.DiffTest do
test "on a description" do test "on a description" do
assert Diff.new( assert Diff.new(
additions: Graph.new([ additions:
EX.S1 Graph.new([
|> EX.p(EX.O3) EX.S1
|> EX.p3(EX.O), |> EX.p(EX.O3)
EX.S3 |> EX.p3(EX.O),
|> EX.p(EX.O) EX.S3
]), |> EX.p(EX.O)
deletions: Graph.new([ ]),
EX.S1 deletions:
|> EX.p(EX.O1) Graph.new([
|> EX.p2(EX.O), EX.S1
])) |> EX.p(EX.O1)
|> EX.p2(EX.O)
])
)
|> Diff.apply( |> Diff.apply(
EX.S1 EX.S1
|> EX.p(EX.O1, EX.O2) |> EX.p(EX.O1, EX.O2)
|> EX.p2(EX.O) |> EX.p2(EX.O)
) == ) ==
Graph.new([ Graph.new([
EX.S1 EX.S1
|> EX.p(EX.O2, EX.O3) |> EX.p(EX.O2, EX.O3)
@ -269,21 +324,26 @@ defmodule RDF.DiffTest do
test "when the statements to be deleted are not present" do test "when the statements to be deleted are not present" do
assert Diff.new( assert Diff.new(
additions: Graph.new( additions:
Graph.new(
EX.S1
|> EX.p(EX.O4)
),
deletions:
Graph.new([
EX.S1
|> EX.p(EX.O2, EX.O3)
|> EX.p2(EX.O),
EX.S2
|> EX.p(EX.O)
])
)
|> Diff.apply(
Graph.new(
EX.S1 EX.S1
|> EX.p(EX.O4) |> EX.p(EX.O1, EX.O2)
), )
deletions: Graph.new([ ) ==
EX.S1
|> EX.p(EX.O2, EX.O3)
|> EX.p2(EX.O),
EX.S2
|> EX.p(EX.O)
]))
|> Diff.apply(Graph.new(
EX.S1
|> EX.p(EX.O1, EX.O2)
)) ==
Graph.new( Graph.new(
EX.S1 EX.S1
|> EX.p(EX.O1, EX.O4) |> EX.p(EX.O1, EX.O4)
@ -1,36 +1,44 @@
defmodule RDF.EqualityTest do defmodule RDF.EqualityTest do
use RDF.Test.Case use RDF.Test.Case
alias RDF.TestDatatypes.{Initials, CustomTime, DateWithoutTz, DateTimeWithTz, Age, alias RDF.TestDatatypes.{
DecimalUnitInterval, DoubleUnitInterval, FloatUnitInterval} Initials,
CustomTime,
DateWithoutTz,
DateTimeWithTz,
Age,
DecimalUnitInterval,
DoubleUnitInterval,
FloatUnitInterval
}
describe "RDF.IRI and XSD.AnyURI" do describe "RDF.IRI and XSD.AnyURI" do
@term_equal_iris [ @term_equal_iris [
{RDF.iri("http://example.com/"), RDF.iri("http://example.com/")}, {RDF.iri("http://example.com/"), RDF.iri("http://example.com/")},
{XSD.anyURI("http://example.com/"), XSD.anyURI("http://example.com/")}, {XSD.anyURI("http://example.com/"), XSD.anyURI("http://example.com/")}
] ]
@value_equal_iris [ @value_equal_iris [
{RDF.iri("http://example.com/"), XSD.anyURI("http://example.com/")}, {RDF.iri("http://example.com/"), XSD.anyURI("http://example.com/")}
] ]
@unequal_iris [ @unequal_iris [
{RDF.iri("http://example.com/foo"), RDF.iri("http://example.com/bar")}, {RDF.iri("http://example.com/foo"), RDF.iri("http://example.com/bar")},
{RDF.iri("http://example.com/foo"), XSD.anyURI("http://example.com/bar")}, {RDF.iri("http://example.com/foo"), XSD.anyURI("http://example.com/bar")}
] ]
@equal_iris_by_coercion [ @equal_iris_by_coercion [
{RDF.iri("http://example.com/"), URI.parse("http://example.com/")}, {RDF.iri("http://example.com/"), URI.parse("http://example.com/")},
{XSD.anyURI("http://example.com/"), URI.parse("http://example.com/")}, {XSD.anyURI("http://example.com/"), URI.parse("http://example.com/")},
{RDF.iri("http://example.com/Foo"), EX.Foo}, {RDF.iri("http://example.com/Foo"), EX.Foo},
{XSD.anyURI("http://example.com/Foo"), EX.Foo}, {XSD.anyURI("http://example.com/Foo"), EX.Foo}
] ]
@unequal_iris_by_coercion [ @unequal_iris_by_coercion [
{RDF.iri("http://example.com/foo"), URI.parse("http://example.com/bar")}, {RDF.iri("http://example.com/foo"), URI.parse("http://example.com/bar")},
{XSD.anyURI("http://example.com/foo"), URI.parse("http://example.com/bar")}, {XSD.anyURI("http://example.com/foo"), URI.parse("http://example.com/bar")},
{RDF.iri("http://example.com/Bar"), EX.Foo}, {RDF.iri("http://example.com/Bar"), EX.Foo},
{XSD.anyURI("http://example.com/Bar"), EX.Foo}, {XSD.anyURI("http://example.com/Bar"), EX.Foo}
] ]
@incomparable_iris [ @incomparable_iris [
{RDF.iri("http://example.com/"), XSD.string("http://example.com/")}, {RDF.iri("http://example.com/"), XSD.string("http://example.com/")},
{XSD.anyURI("http://example.com/"), XSD.string("http://example.com/")}, {XSD.anyURI("http://example.com/"), XSD.string("http://example.com/")}
] ]
test "term equality", do: assert_term_equal(@term_equal_iris) test "term equality", do: assert_term_equal(@term_equal_iris)
@ -43,26 +51,25 @@ defmodule RDF.EqualityTest do
describe "RDF.BlankNode" do describe "RDF.BlankNode" do
@term_equal_bnodes [ @term_equal_bnodes [
{RDF.bnode("foo"), RDF.bnode("foo")}, {RDF.bnode("foo"), RDF.bnode("foo")}
]
@value_equal_bnodes [
] ]
@value_equal_bnodes []
@unequal_bnodes [ @unequal_bnodes [
{RDF.bnode("foo"), RDF.bnode("bar")}, {RDF.bnode("foo"), RDF.bnode("bar")}
] ]
@equal_bnodes_by_coercion [] @equal_bnodes_by_coercion []
@unequal_bnodes_by_coercion [] @unequal_bnodes_by_coercion []
@incomparable_bnodes [ @incomparable_bnodes [
{RDF.bnode("foo"), XSD.string("foo")}, {RDF.bnode("foo"), XSD.string("foo")},
{XSD.string("foo"), RDF.bnode("foo")}, {XSD.string("foo"), RDF.bnode("foo")}
] ]
test "term equality", do: assert_term_equal @term_equal_bnodes test "term equality", do: assert_term_equal(@term_equal_bnodes)
test "value equality", do: assert_value_equal @value_equal_bnodes test "value equality", do: assert_value_equal(@value_equal_bnodes)
test "inequality", do: assert_unequal @unequal_bnodes test "inequality", do: assert_unequal(@unequal_bnodes)
test "coerced value equality", do: assert_coerced_equal(@equal_bnodes_by_coercion) test "coerced value equality", do: assert_coerced_equal(@equal_bnodes_by_coercion)
test "coerced value inequality", do: assert_coerced_unequal(@unequal_bnodes_by_coercion) test "coerced value inequality", do: assert_coerced_unequal(@unequal_bnodes_by_coercion)
test "incomparability", do: assert_incomparable @incomparable_bnodes test "incomparability", do: assert_incomparable(@incomparable_bnodes)
end end
describe "XSD.String and RDF.LangString" do describe "XSD.String and RDF.LangString" do
@ -88,7 +95,7 @@ defmodule RDF.EqualityTest do
{RDF.lang_string("foo", language: "de"), "foo"}, {RDF.lang_string("foo", language: "de"), "foo"},
{XSD.string("foo"), RDF.lang_string("foo", language: "de")}, {XSD.string("foo"), RDF.lang_string("foo", language: "de")},
{RDF.lang_string("foo", language: "de"), XSD.string("foo")}, {RDF.lang_string("foo", language: "de"), XSD.string("foo")},
{XSD.string("foo"), RDF.bnode("foo")}, {XSD.string("foo"), RDF.bnode("foo")}
] ]
test "term equality", do: assert_term_equal(@term_equal_strings) test "term equality", do: assert_term_equal(@term_equal_strings)
@ -159,7 +166,7 @@ defmodule RDF.EqualityTest do
{XSD.decimal("-42.0"), XSD.decimal(-42.0)}, {XSD.decimal("-42.0"), XSD.decimal(-42.0)},
{XSD.decimal("1.0"), XSD.decimal(1.0)}, {XSD.decimal("1.0"), XSD.decimal(1.0)},
{Age.new("42"), Age.new("42")}, {Age.new("42"), Age.new("42")},
{DecimalUnitInterval.new("0.1"), DecimalUnitInterval.new("0.1")}, {DecimalUnitInterval.new("0.1"), DecimalUnitInterval.new("0.1")}
] ]
@value_equal_numerics [ @value_equal_numerics [
{XSD.integer("42"), XSD.non_negative_integer("42")}, {XSD.integer("42"), XSD.non_negative_integer("42")},
@ -200,7 +207,7 @@ defmodule RDF.EqualityTest do
{XSD.integer(1), XSD.integer(2)}, {XSD.integer(1), XSD.integer(2)},
{XSD.integer("1"), XSD.double("1.1")}, {XSD.integer("1"), XSD.double("1.1")},
{XSD.integer("1"), XSD.decimal("1.1")}, {XSD.integer("1"), XSD.decimal("1.1")},
{DecimalUnitInterval.new(0.1), DoubleUnitInterval.new(0.2)}, {DecimalUnitInterval.new(0.1), DoubleUnitInterval.new(0.2)}
] ]
@equal_numerics_by_coercion [ @equal_numerics_by_coercion [
{XSD.integer(42), 42}, {XSD.integer(42), 42},
@ -228,7 +235,7 @@ defmodule RDF.EqualityTest do
{XSD.float("foo"), XSD.float("foo")}, {XSD.float("foo"), XSD.float("foo")},
{XSD.non_negative_integer("foo"), XSD.non_negative_integer("foo")}, {XSD.non_negative_integer("foo"), XSD.non_negative_integer("foo")},
{XSD.positive_integer("foo"), XSD.positive_integer("foo")}, {XSD.positive_integer("foo"), XSD.positive_integer("foo")},
{DecimalUnitInterval.new(1.1), DecimalUnitInterval.new(1.1)}, {DecimalUnitInterval.new(1.1), DecimalUnitInterval.new(1.1)}
] ]
@unequal_invalid_numerics [ @unequal_invalid_numerics [
{XSD.integer("foo"), XSD.integer("bar")}, {XSD.integer("foo"), XSD.integer("bar")},
@ -239,7 +246,7 @@ defmodule RDF.EqualityTest do
{XSD.float("foo"), XSD.float("bar")}, {XSD.float("foo"), XSD.float("bar")},
{XSD.non_negative_integer("foo"), XSD.non_negative_integer("bar")}, {XSD.non_negative_integer("foo"), XSD.non_negative_integer("bar")},
{XSD.positive_integer("foo"), XSD.positive_integer("bar")}, {XSD.positive_integer("foo"), XSD.positive_integer("bar")},
{DecimalUnitInterval.new(1.1), DoubleUnitInterval.new(1.2)}, {DecimalUnitInterval.new(1.1), DoubleUnitInterval.new(1.2)}
] ]
@incomparable_numerics [ @incomparable_numerics [
{XSD.integer("42"), nil}, {XSD.integer("42"), nil},
@ -266,7 +273,7 @@ defmodule RDF.EqualityTest do
@term_equal_datetimes [ @term_equal_datetimes [
{XSD.datetime("2002-04-02T12:00:00-01:00"), XSD.datetime("2002-04-02T12:00:00-01:00")}, {XSD.datetime("2002-04-02T12:00:00-01:00"), XSD.datetime("2002-04-02T12:00:00-01:00")},
{XSD.datetime("2002-04-02T12:00:00"), XSD.datetime("2002-04-02T12:00:00")}, {XSD.datetime("2002-04-02T12:00:00"), XSD.datetime("2002-04-02T12:00:00")},
{DateTimeWithTz.new("2002-04-02T12:00:00Z"), DateTimeWithTz.new("2002-04-02T12:00:00Z")}, {DateTimeWithTz.new("2002-04-02T12:00:00Z"), DateTimeWithTz.new("2002-04-02T12:00:00Z")}
] ]
@value_equal_datetimes [ @value_equal_datetimes [
{XSD.datetime("2002-04-02T12:00:00-01:00"), XSD.datetime("2002-04-02T17:00:00+04:00")}, {XSD.datetime("2002-04-02T12:00:00-01:00"), XSD.datetime("2002-04-02T17:00:00+04:00")},
@ -278,10 +285,10 @@ defmodule RDF.EqualityTest do
{XSD.datetime("2002-04-02T23:00:00+00:00"), XSD.datetime("2002-04-02T23:00:00-00:00")}, {XSD.datetime("2002-04-02T23:00:00+00:00"), XSD.datetime("2002-04-02T23:00:00-00:00")},
{XSD.datetime("2010-01-01T00:00:00.0000Z"), XSD.datetime("2010-01-01T00:00:00Z")}, {XSD.datetime("2010-01-01T00:00:00.0000Z"), XSD.datetime("2010-01-01T00:00:00Z")},
{XSD.datetime("2005-04-04T24:00:00"), XSD.datetime("2005-04-05T00:00:00")}, {XSD.datetime("2005-04-04T24:00:00"), XSD.datetime("2005-04-05T00:00:00")},
{DateTimeWithTz.new("2002-04-02T12:00:00-01:00"),
{DateTimeWithTz.new("2002-04-02T12:00:00-01:00"), DateTimeWithTz.new("2002-04-02T17:00:00+04:00")}, DateTimeWithTz.new("2002-04-02T17:00:00+04:00")},
{DateTimeWithTz.new("2002-04-02T23:00:00Z"), XSD.datetime("2002-04-02T23:00:00+00:00")}, {DateTimeWithTz.new("2002-04-02T23:00:00Z"), XSD.datetime("2002-04-02T23:00:00+00:00")},
{XSD.datetime("2002-04-02T23:00:00+00:00"), DateTimeWithTz.new("2002-04-02T23:00:00-00:00")}, {XSD.datetime("2002-04-02T23:00:00+00:00"), DateTimeWithTz.new("2002-04-02T23:00:00-00:00")}
] ]
@unequal_datetimes [ @unequal_datetimes [
{XSD.datetime("2002-04-02T12:00:00"), XSD.datetime("2002-04-02T17:00:00")}, {XSD.datetime("2002-04-02T12:00:00"), XSD.datetime("2002-04-02T17:00:00")},
@ -290,20 +297,20 @@ defmodule RDF.EqualityTest do
] ]
@equal_datetimes_by_coercion [ @equal_datetimes_by_coercion [
{XSD.datetime("2002-04-02T12:00:00-01:00"), {XSD.datetime("2002-04-02T12:00:00-01:00"),
elem(DateTime.from_iso8601("2002-04-02T12:00:00-01:00"), 1)}, elem(DateTime.from_iso8601("2002-04-02T12:00:00-01:00"), 1)},
{XSD.datetime("2002-04-02T12:00:00"), ~N"2002-04-02T12:00:00"}, {XSD.datetime("2002-04-02T12:00:00"), ~N"2002-04-02T12:00:00"},
{XSD.datetime("2002-04-02T23:00:00Z"), {XSD.datetime("2002-04-02T23:00:00Z"),
elem(DateTime.from_iso8601("2002-04-02T23:00:00+00:00"), 1)}, elem(DateTime.from_iso8601("2002-04-02T23:00:00+00:00"), 1)},
{XSD.datetime("2002-04-02T23:00:00+00:00"), {XSD.datetime("2002-04-02T23:00:00+00:00"),
elem(DateTime.from_iso8601("2002-04-02T23:00:00Z"), 1)}, elem(DateTime.from_iso8601("2002-04-02T23:00:00Z"), 1)},
{XSD.datetime("2002-04-02T23:00:00-00:00"), {XSD.datetime("2002-04-02T23:00:00-00:00"),
elem(DateTime.from_iso8601("2002-04-02T23:00:00Z"), 1)}, elem(DateTime.from_iso8601("2002-04-02T23:00:00Z"), 1)},
{XSD.datetime("2002-04-02T23:00:00-00:00"), {XSD.datetime("2002-04-02T23:00:00-00:00"),
elem(DateTime.from_iso8601("2002-04-02T23:00:00+00:00"), 1)} elem(DateTime.from_iso8601("2002-04-02T23:00:00+00:00"), 1)}
] ]
@unequal_datetimes_by_coercion [ @unequal_datetimes_by_coercion [
{XSD.datetime("2002-04-02T12:00:00-01:00"), {XSD.datetime("2002-04-02T12:00:00-01:00"),
elem(DateTime.from_iso8601("2002-04-02T12:00:00+00:00"), 1)} elem(DateTime.from_iso8601("2002-04-02T12:00:00+00:00"), 1)}
] ]
@equal_invalid_datetimes [ @equal_invalid_datetimes [
{XSD.datetime("foo"), XSD.datetime("foo")}, {XSD.datetime("foo"), XSD.datetime("foo")},
@ -320,7 +327,7 @@ defmodule RDF.EqualityTest do
{XSD.string("2002-04-02T12:00:00-01:00"), XSD.datetime("2002-04-02T12:00:00-01:00")}, {XSD.string("2002-04-02T12:00:00-01:00"), XSD.datetime("2002-04-02T12:00:00-01:00")},
# These are incomparable because of indeterminacy due to missing timezone # These are incomparable because of indeterminacy due to missing timezone
{XSD.datetime("2002-04-02T12:00:00"), XSD.datetime("2002-04-02T23:00:00+00:00")}, {XSD.datetime("2002-04-02T12:00:00"), XSD.datetime("2002-04-02T23:00:00+00:00")},
{XSD.datetime("2002-04-02T12:00:00"), DateTimeWithTz.new("2002-04-02T12:00:00Z")}, {XSD.datetime("2002-04-02T12:00:00"), DateTimeWithTz.new("2002-04-02T12:00:00Z")}
] ]
test "term equality", do: assert_term_equal(@term_equal_datetimes) test "term equality", do: assert_term_equal(@term_equal_datetimes)
@ -337,17 +344,17 @@ defmodule RDF.EqualityTest do
@term_equal_dates [ @term_equal_dates [
{XSD.date("2002-04-02-01:00"), XSD.date("2002-04-02-01:00")}, {XSD.date("2002-04-02-01:00"), XSD.date("2002-04-02-01:00")},
{XSD.date("2002-04-02"), XSD.date("2002-04-02")}, {XSD.date("2002-04-02"), XSD.date("2002-04-02")},
{DateWithoutTz.new("2002-04-02"), DateWithoutTz.new("2002-04-02")}, {DateWithoutTz.new("2002-04-02"), DateWithoutTz.new("2002-04-02")}
] ]
@value_equal_dates [ @value_equal_dates [
{XSD.date("2002-04-02-00:00"), XSD.date("2002-04-02+00:00")}, {XSD.date("2002-04-02-00:00"), XSD.date("2002-04-02+00:00")},
{XSD.date("2002-04-02Z"), XSD.date("2002-04-02+00:00")}, {XSD.date("2002-04-02Z"), XSD.date("2002-04-02+00:00")},
{XSD.date("2002-04-02Z"), XSD.date("2002-04-02-00:00")}, {XSD.date("2002-04-02Z"), XSD.date("2002-04-02-00:00")},
{XSD.date("2002-04-02"), DateWithoutTz.new("2002-04-02")}, {XSD.date("2002-04-02"), DateWithoutTz.new("2002-04-02")}
] ]
@unequal_dates [ @unequal_dates [
{XSD.date("2002-04-01"), XSD.date("2002-04-02")}, {XSD.date("2002-04-01"), XSD.date("2002-04-02")},
{DateWithoutTz.new("2002-04-02"), DateWithoutTz.new("2002-04-01")}, {DateWithoutTz.new("2002-04-02"), DateWithoutTz.new("2002-04-01")}
] ]
@equal_dates_by_coercion [ @equal_dates_by_coercion [
{XSD.date("2002-04-02"), Date.from_iso8601!("2002-04-02")} {XSD.date("2002-04-02"), Date.from_iso8601!("2002-04-02")}
@ -357,13 +364,13 @@ defmodule RDF.EqualityTest do
] ]
@equal_invalid_dates [ @equal_invalid_dates [
{XSD.date("foo"), XSD.date("foo")}, {XSD.date("foo"), XSD.date("foo")},
{DateWithoutTz.new("foo"), DateWithoutTz.new("foo")}, {DateWithoutTz.new("foo"), DateWithoutTz.new("foo")}
] ]
@unequal_invalid_dates [ @unequal_invalid_dates [
{XSD.date("2002.04.02"), XSD.date("2002-04-02")}, {XSD.date("2002.04.02"), XSD.date("2002-04-02")},
{XSD.date("foo"), XSD.date("bar")}, {XSD.date("foo"), XSD.date("bar")},
{DateWithoutTz.new("foo"), DateWithoutTz.new("bar")}, {DateWithoutTz.new("foo"), DateWithoutTz.new("bar")},
{XSD.date("foo"), DateWithoutTz.new("bar")}, {XSD.date("foo"), DateWithoutTz.new("bar")}
] ]
@incomparable_dates [ @incomparable_dates [
{XSD.date("2002-04-02"), XSD.string("2002-04-02")}, {XSD.date("2002-04-02"), XSD.string("2002-04-02")},
@ -434,17 +441,17 @@ defmodule RDF.EqualityTest do
@term_equal_times [ @term_equal_times [
{XSD.time("12:00:00+01:00"), XSD.time("12:00:00+01:00")}, {XSD.time("12:00:00+01:00"), XSD.time("12:00:00+01:00")},
{XSD.time("12:00:00"), XSD.time("12:00:00")}, {XSD.time("12:00:00"), XSD.time("12:00:00")},
{CustomTime.new("00:00:00Z"), CustomTime.new("00:00:00Z")}, {CustomTime.new("00:00:00Z"), CustomTime.new("00:00:00Z")}
] ]
@value_equal_times [ @value_equal_times [
{XSD.time("00:00:00+00:00"), XSD.time("00:00:00Z")}, {XSD.time("00:00:00+00:00"), XSD.time("00:00:00Z")},
{XSD.time("00:00:00+00:00"), CustomTime.new("00:00:00Z")}, {XSD.time("00:00:00+00:00"), CustomTime.new("00:00:00Z")},
{CustomTime.new("00:00:00+00:00"), CustomTime.new("00:00:00Z")}, {CustomTime.new("00:00:00+00:00"), CustomTime.new("00:00:00Z")}
] ]
@unequal_times [ @unequal_times [
{XSD.time("12:00:00"), XSD.time("13:00:00")}, {XSD.time("12:00:00"), XSD.time("13:00:00")},
{XSD.time("00:00:00.0000Z"), XSD.time("00:00:00Z")}, {XSD.time("00:00:00.0000Z"), XSD.time("00:00:00Z")},
{XSD.time("00:00:00.0000Z"), CustomTime.new("00:00:00Z")}, {XSD.time("00:00:00.0000Z"), CustomTime.new("00:00:00Z")}
] ]
@equal_times_by_coercion [ @equal_times_by_coercion [
{XSD.time("12:00:00"), Time.from_iso8601!("12:00:00")} {XSD.time("12:00:00"), Time.from_iso8601!("12:00:00")}
@ -454,11 +461,11 @@ defmodule RDF.EqualityTest do
] ]
@equal_invalid_times [ @equal_invalid_times [
{XSD.time("foo"), XSD.time("foo")}, {XSD.time("foo"), XSD.time("foo")},
{CustomTime.new("foo"), CustomTime.new("foo")}, {CustomTime.new("foo"), CustomTime.new("foo")}
] ]
@unequal_invalid_times [ @unequal_invalid_times [
{XSD.time("foo"), XSD.time("bar")}, {XSD.time("foo"), XSD.time("bar")},
{XSD.time("foo"), CustomTime.new("bar")}, {XSD.time("foo"), CustomTime.new("bar")}
] ]
@incomparable_times [ @incomparable_times [
{XSD.time("12:00:00"), XSD.string("12:00:00")}, {XSD.time("12:00:00"), XSD.string("12:00:00")},
@ -508,20 +515,20 @@ defmodule RDF.EqualityTest do
describe "RDF.Literal.Generics" do describe "RDF.Literal.Generics" do
@equal_literals [ @equal_literals [
{RDF.literal("foo", datatype: "http://example.com/datatype"), {RDF.literal("foo", datatype: "http://example.com/datatype"),
RDF.literal("foo", datatype: "http://example.com/datatype")}, RDF.literal("foo", datatype: "http://example.com/datatype")}
] ]
@unequal_literals [ @unequal_literals [
{RDF.literal("foo", datatype: "http://example.com/datatype"), {RDF.literal("foo", datatype: "http://example.com/datatype"),
RDF.literal("bar", datatype: "http://example.com/datatype")}, RDF.literal("bar", datatype: "http://example.com/datatype")}
] ]
@incomparable_literals [ @incomparable_literals [
{RDF.literal("foo", datatype: "http://example.com/datatype1"), {RDF.literal("foo", datatype: "http://example.com/datatype1"),
RDF.literal("foo", datatype: "http://example.com/datatype2")}, RDF.literal("foo", datatype: "http://example.com/datatype2")}
] ]
test "equality", do: assert_term_equal @equal_literals test "equality", do: assert_term_equal(@equal_literals)
test "inequality", do: assert_unequal @unequal_literals test "inequality", do: assert_unequal(@unequal_literals)
test "incomparability", do: assert_incomparable @incomparable_literals test "incomparability", do: assert_incomparable(@incomparable_literals)
end end
defp assert_term_equal(examples) do defp assert_term_equal(examples) do
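The churn in this file is purely mechanical: the formatter drops trailing commas from list and tuple literals and adds parentheses to zero-arity remote calls. A minimal sketch of reproducing the same rewrite with Code.format_string!/2 (the same engine mix format uses); the input string below is illustrative, not taken verbatim from the commit:

    # Illustrative only: run the formatter engine over a small snippet.
    source = """
    @value_equal_numerics [
      {XSD.integer("42"), XSD.non_negative_integer("42")},
    ]
    """

    source
    |> Code.format_string!()
    |> IO.iodata_to_binary()
    |> IO.puts()
    # Prints the attribute without the trailing comma; a bare zero-arity call
    # such as `RDF.nil` would likewise come back as `RDF.nil()`.

Running mix format --check-formatted in CI is the usual way to keep the tree in this shape after a bulk reformat.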

File diff suppressed because it is too large

View File

@ -3,39 +3,37 @@ defmodule RDF.IRITest do
use RDF.Vocabulary.Namespace use RDF.Vocabulary.Namespace
defvocab EX, defvocab EX, base_iri: "http://example.com/#", terms: [], strict: false
base_iri: "http://example.com/#",
terms: [], strict: false
doctest RDF.IRI doctest RDF.IRI
alias RDF.IRI alias RDF.IRI
@absolute_iris [ @absolute_iris [
"http://www.example.com/foo/", "http://www.example.com/foo/",
%IRI{value: "http://www.example.com/foo/"}, %IRI{value: "http://www.example.com/foo/"},
URI.parse("http://www.example.com/foo/"), URI.parse("http://www.example.com/foo/"),
"http://www.example.com/foo#", "http://www.example.com/foo#",
%IRI{value: "http://www.example.com/foo#"}, %IRI{value: "http://www.example.com/foo#"},
URI.parse("http://www.example.com/foo#"), URI.parse("http://www.example.com/foo#"),
"https://en.wiktionary.org/wiki/Ῥόδος", "https://en.wiktionary.org/wiki/Ῥόδος",
%IRI{value: "https://en.wiktionary.org/wiki/Ῥόδος"}, %IRI{value: "https://en.wiktionary.org/wiki/Ῥόδος"},
URI.parse("https://en.wiktionary.org/wiki/Ῥόδος"), URI.parse("https://en.wiktionary.org/wiki/Ῥόδος")
] ]
@relative_iris [ @relative_iris [
"/relative/", "/relative/",
%IRI{value: "/relative/"}, %IRI{value: "/relative/"},
URI.parse("/relative/"), URI.parse("/relative/"),
"/Ῥόδος/", "/Ῥόδος/",
%IRI{value: "/Ῥόδος/"}, %IRI{value: "/Ῥόδος/"},
URI.parse("/Ῥόδος/"), URI.parse("/Ῥόδος/")
] ]
def absolute_iris, do: @absolute_iris def absolute_iris, do: @absolute_iris
def relative_iris, do: @relative_iris def relative_iris, do: @relative_iris
def valid_iris, do: @absolute_iris def valid_iris, do: @absolute_iris
def invalid_iris, do: nil # TODO: # TODO:
def invalid_iris, do: nil
describe "new/1" do describe "new/1" do
test "with a string" do test "with a string" do
@ -65,7 +63,6 @@ defmodule RDF.IRITest do
end end
end end
describe "new!/1" do describe "new!/1" do
test "with valid iris" do test "with valid iris" do
Enum.each(valid_iris(), fn valid_iri -> Enum.each(valid_iris(), fn valid_iri ->
@ -105,7 +102,6 @@ defmodule RDF.IRITest do
end end
end end
describe "coerce_base/1" do describe "coerce_base/1" do
test "with a string" do test "with a string" do
assert IRI.coerce_base("http://example.com/") == IRI.new("http://example.com/") assert IRI.coerce_base("http://example.com/") == IRI.new("http://example.com/")
@ -135,7 +131,7 @@ defmodule RDF.IRITest do
end end
test "with a RDF.Vocabulary.Namespace module" do test "with a RDF.Vocabulary.Namespace module" do
assert IRI.coerce_base(EX) == IRI.new(EX.__base_iri__) assert IRI.coerce_base(EX) == IRI.new(EX.__base_iri__())
end end
test "with a RDF.Vocabulary.Namespace module which is not loaded yet" do test "with a RDF.Vocabulary.Namespace module which is not loaded yet" do
@ -143,7 +139,6 @@ defmodule RDF.IRITest do
end end
end end
describe "valid!/1" do describe "valid!/1" do
test "with valid iris" do test "with valid iris" do
Enum.each(valid_iris(), fn valid_iri -> Enum.each(valid_iris(), fn valid_iri ->
@ -180,7 +175,6 @@ defmodule RDF.IRITest do
end end
end end
describe "valid?/1" do describe "valid?/1" do
test "with valid iris" do test "with valid iris" do
Enum.each(valid_iris(), fn valid_iri -> Enum.each(valid_iris(), fn valid_iri ->
@ -213,7 +207,6 @@ defmodule RDF.IRITest do
end end
end end
describe "absolute?/1" do describe "absolute?/1" do
test "with absolute iris" do test "with absolute iris" do
Enum.each(absolute_iris(), fn absolute_iri -> Enum.each(absolute_iris(), fn absolute_iri ->
@ -246,7 +239,6 @@ defmodule RDF.IRITest do
end end
end end
describe "absolute/2" do describe "absolute/2" do
test "with an already absolute iri" do test "with an already absolute iri" do
for absolute_iri <- absolute_iris(), for absolute_iri <- absolute_iris(),
@ -258,7 +250,7 @@ defmodule RDF.IRITest do
test "with a relative iri" do test "with a relative iri" do
for relative_iri <- relative_iris(), base_iri <- absolute_iris() do for relative_iri <- relative_iris(), base_iri <- absolute_iris() do
assert IRI.absolute(relative_iri, base_iri) == assert IRI.absolute(relative_iri, base_iri) ==
IRI.merge(base_iri, relative_iri) IRI.merge(base_iri, relative_iri)
end end
end end
@ -269,28 +261,25 @@ defmodule RDF.IRITest do
end end
end end
describe "merge/2" do describe "merge/2" do
test "with a valid absolute base iri and a valid relative iri" do test "with a valid absolute base iri and a valid relative iri" do
for base_iri <- absolute_iris(), relative_iri <- relative_iris() do for base_iri <- absolute_iris(), relative_iri <- relative_iris() do
assert IRI.merge(base_iri, relative_iri) == ( assert IRI.merge(base_iri, relative_iri) ==
base_iri base_iri
|> to_string |> to_string
|> URI.merge(to_string(relative_iri)) |> URI.merge(to_string(relative_iri))
|> IRI.new |> IRI.new()
) end
end
end end
test "with a valid absolute base iri and a valid absolute iri" do test "with a valid absolute base iri and a valid absolute iri" do
for base_iri <- absolute_iris(), absolute_iri <- absolute_iris() do for base_iri <- absolute_iris(), absolute_iri <- absolute_iris() do
assert IRI.merge(base_iri, absolute_iri) == ( assert IRI.merge(base_iri, absolute_iri) ==
base_iri base_iri
|> to_string |> to_string
|> URI.merge(to_string(absolute_iri)) |> URI.merge(to_string(absolute_iri))
|> IRI.new |> IRI.new()
) end
end
end end
test "with a relative base iri" do test "with a relative base iri" do
@ -302,7 +291,7 @@ defmodule RDF.IRITest do
end end
test "with empty fragments" do test "with empty fragments" do
assert IRI.merge("http://example.com/","foo#") == IRI.new("http://example.com/foo#") assert IRI.merge("http://example.com/", "foo#") == IRI.new("http://example.com/foo#")
end end
@tag skip: "TODO: proper validation" @tag skip: "TODO: proper validation"
@ -316,17 +305,16 @@ defmodule RDF.IRITest do
describe "parse/1" do describe "parse/1" do
test "with absolute and relative iris" do test "with absolute and relative iris" do
Enum.each(absolute_iris() ++ relative_iris(), fn iri -> Enum.each(absolute_iris() ++ relative_iris(), fn iri ->
assert IRI.parse(iri) == ( assert IRI.parse(iri) ==
iri iri
|> IRI.new |> IRI.new()
|> to_string() |> to_string()
|> URI.parse |> URI.parse()
)
end) end)
end end
test "with a resolvable atom" do test "with a resolvable atom" do
assert IRI.parse(EX.Foo) == (EX.Foo |> IRI.new |> IRI.parse) assert IRI.parse(EX.Foo) == EX.Foo |> IRI.new() |> IRI.parse()
end end
test "with empty fragments" do test "with empty fragments" do
@ -354,7 +342,7 @@ defmodule RDF.IRITest do
test "with IRI resolvable namespace terms" do test "with IRI resolvable namespace terms" do
assert IRI.to_string(EX.Foo) == "http://example.com/#Foo" assert IRI.to_string(EX.Foo) == "http://example.com/#Foo"
assert IRI.to_string(EX.foo) == "http://example.com/#foo" assert IRI.to_string(EX.foo()) == "http://example.com/#foo"
end end
test "with non-resolvable atoms" do test "with non-resolvable atoms" do
@ -369,5 +357,4 @@ defmodule RDF.IRITest do
test "Inspect protocol implementation" do test "Inspect protocol implementation" do
assert inspect(IRI.new("http://example.com/")) == "~I<http://example.com/>" assert inspect(IRI.new("http://example.com/")) == "~I<http://example.com/>"
end end
end end
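Note the asymmetry the formatter preserves in this file: defvocab keeps its paren-free DSL style, while ordinary zero-arity calls such as EX.__base_iri__() gain parentheses. That split is what the :locals_without_parens option controls. A small, hypothetical demo of the option in isolation; my_dsl_macro is a made-up name used only for illustration:

    # Hypothetical: keep a DSL-style macro paren-free while formatting the rest.
    code = """
    my_dsl_macro :foo, bar: 1
    value = String.upcase "abc"
    """

    code
    |> Code.format_string!(locals_without_parens: [my_dsl_macro: 2])
    |> IO.iodata_to_binary()
    |> IO.puts()
    # `my_dsl_macro :foo, bar: 1` is left without parentheses, while the
    # remote call becomes `String.upcase("abc")`.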

View File

@ -9,152 +9,172 @@ defmodule RDF.ListTest do
use RDF.Vocabulary.Namespace use RDF.Vocabulary.Namespace
defvocab EX, defvocab EX, base_iri: "http://example.org/#", terms: [], strict: false
base_iri: "http://example.org/#",
terms: [], strict: false
setup do setup do
{:ok, {:ok,
empty: RDF.List.new(RDF.nil, Graph.new), empty: RDF.List.new(RDF.nil(), Graph.new()),
one: RDF.List.from([EX.element], head: ~B<one>), one: RDF.List.from([EX.element()], head: ~B<one>),
abc: RDF.List.from(~w[a b c], head: ~B<abc>), abc: RDF.List.from(~w[a b c], head: ~B<abc>),
ten: RDF.List.from(Enum.to_list(1..10), head: ~B<ten>), ten: RDF.List.from(Enum.to_list(1..10), head: ~B<ten>),
nested: RDF.List.from(["foo", [1, 2], "bar"], head: ~B<nested>), nested: RDF.List.from(["foo", [1, 2], "bar"], head: ~B<nested>)}
}
end end
describe "new/2" do describe "new/2" do
####################################################################### #######################################################################
# success cases # success cases
test "valid head list node" do test "valid head list node" do
graph = Graph.new( graph =
~B<Foo> Graph.new(
|> RDF.first(1) ~B<Foo>
|> RDF.rest(~B<Bar>)) |> RDF.first(1)
|> Graph.add( |> RDF.rest(~B<Bar>)
~B<Bar> )
|> RDF.first(2) |> Graph.add(
|> RDF.rest(RDF.nil)) ~B<Bar>
|> RDF.first(2)
|> RDF.rest(RDF.nil())
)
assert %RDF.List{} = list = RDF.List.new(~B<Foo>, graph) assert %RDF.List{} = list = RDF.List.new(~B<Foo>, graph)
assert list.head == ~B<Foo> assert list.head == ~B<Foo>
assert list.graph == graph assert list.graph == graph
end end
test "with non-blank list nodes" do test "with non-blank list nodes" do
graph = Graph.new( graph =
EX.Foo Graph.new(
|> RDF.first(1) EX.Foo
|> RDF.rest(RDF.nil)) |> RDF.first(1)
|> RDF.rest(RDF.nil())
)
assert %RDF.List{} = list = RDF.List.new(EX.Foo, graph) assert %RDF.List{} = list = RDF.List.new(EX.Foo, graph)
assert list.head == iri(EX.Foo) assert list.head == iri(EX.Foo)
end end
test "with other properties on its nodes" do test "with other properties on its nodes" do
assert RDF.List.new(~B<Foo>, assert RDF.List.new(
~B<Foo>,
Graph.new( Graph.new(
~B<Foo> ~B<Foo>
|> EX.other(EX.Property) |> EX.other(EX.Property)
|> RDF.first(1) |> RDF.first(1)
|> RDF.rest(~B<Bar>)) |> RDF.rest(~B<Bar>)
)
|> Graph.add( |> Graph.add(
~B<Bar> ~B<Bar>
|> EX.other(EX.Property2) |> EX.other(EX.Property2)
|> RDF.first(2) |> RDF.first(2)
|> RDF.rest(RDF.nil)) |> RDF.rest(RDF.nil())
)
) )
|> RDF.List.valid? == true |> RDF.List.valid?() == true
end end
####################################################################### #######################################################################
# failure cases # failure cases
test "when given list node doesn't exist in the given graph" do test "when given list node doesn't exist in the given graph" do
assert RDF.List.new(RDF.bnode, RDF.Graph.new) == nil assert RDF.List.new(RDF.bnode(), RDF.Graph.new()) == nil
end end
test "When the given head node is not a list" do test "When the given head node is not a list" do
assert RDF.List.new(42, RDF.Graph.new) == nil assert RDF.List.new(42, RDF.Graph.new()) == nil
assert RDF.List.new(EX.Foo, RDF.Graph.new({EX.Foo, EX.bar, EX.Baz})) == nil assert RDF.List.new(EX.Foo, RDF.Graph.new({EX.Foo, EX.bar(), EX.Baz})) == nil
assert RDF.List.new(EX.Foo, RDF.Graph.new({EX.Foo, RDF.first, EX.Baz})) == nil assert RDF.List.new(EX.Foo, RDF.Graph.new({EX.Foo, RDF.first(), EX.Baz})) == nil
end end
test "when list nodes are incomplete" do test "when list nodes are incomplete" do
assert RDF.List.new(EX.Foo, RDF.Graph.new({EX.Foo, RDF.first, EX.Baz})) == nil assert RDF.List.new(EX.Foo, RDF.Graph.new({EX.Foo, RDF.first(), EX.Baz})) == nil
assert RDF.List.new(EX.Foo, RDF.Graph.new({EX.Foo, RDF.rest, RDF.nil})) == nil assert RDF.List.new(EX.Foo, RDF.Graph.new({EX.Foo, RDF.rest(), RDF.nil()})) == nil
end end
test "when head node has multiple rdf:first objects" do test "when head node has multiple rdf:first objects" do
assert RDF.List.new(~B<Foo>, assert RDF.List.new(
~B<Foo>,
Graph.new( Graph.new(
~B<Foo> ~B<Foo>
|> RDF.first(1, 2) |> RDF.first(1, 2)
|> RDF.rest(RDF.nil)) |> RDF.rest(RDF.nil())
)
) == nil ) == nil
end end
test "when later list nodes have multiple rdf:first objects" do test "when later list nodes have multiple rdf:first objects" do
assert RDF.List.new(~B<Foo>, assert RDF.List.new(
~B<Foo>,
Graph.new( Graph.new(
~B<Foo> ~B<Foo>
|> RDF.first(1) |> RDF.first(1)
|> RDF.rest(~B<Bar>)) |> RDF.rest(~B<Bar>)
)
|> Graph.add( |> Graph.add(
~B<Bar> ~B<Bar>
|> RDF.first(2, 3) |> RDF.first(2, 3)
|> RDF.rest(RDF.nil)) |> RDF.rest(RDF.nil())
)
) == nil ) == nil
end end
test "when list nodes have multiple rdf:rest objects" do test "when list nodes have multiple rdf:rest objects" do
assert RDF.List.new(~B<Foo>, assert RDF.List.new(
~B<Foo>,
Graph.new( Graph.new(
~B<Foo> ~B<Foo>
|> RDF.first(1) |> RDF.first(1)
|> RDF.rest(~B<Bar>, ~B<Baz>)) |> RDF.rest(~B<Bar>, ~B<Baz>)
)
|> Graph.add( |> Graph.add(
~B<Bar> ~B<Bar>
|> RDF.first(2) |> RDF.first(2)
|> RDF.rest(RDF.nil)) |> RDF.rest(RDF.nil())
)
|> Graph.add( |> Graph.add(
~B<Baz> ~B<Baz>
|> RDF.first(3) |> RDF.first(3)
|> RDF.rest(RDF.nil)) |> RDF.rest(RDF.nil())
)
) == nil ) == nil
assert RDF.List.new(~B<Foo>,
assert RDF.List.new(
~B<Foo>,
Graph.new( Graph.new(
~B<Foo> ~B<Foo>
|> RDF.first(1) |> RDF.first(1)
|> RDF.rest(~B<Bar>)) |> RDF.rest(~B<Bar>)
)
|> Graph.add( |> Graph.add(
~B<Bar> ~B<Bar>
|> RDF.first(2) |> RDF.first(2)
|> RDF.rest(RDF.nil, ~B<Baz>)) |> RDF.rest(RDF.nil(), ~B<Baz>)
)
|> Graph.add( |> Graph.add(
~B<Baz> ~B<Baz>
|> RDF.first(3) |> RDF.first(3)
|> RDF.rest(RDF.nil)) |> RDF.rest(RDF.nil())
)
) == nil ) == nil
end end
test "when the list is cyclic" do test "when the list is cyclic" do
assert RDF.List.new(~B<Foo>, assert RDF.List.new(
~B<Foo>,
Graph.new( Graph.new(
~B<Foo> ~B<Foo>
|> RDF.first(1) |> RDF.first(1)
|> RDF.rest(~B<Bar>)) |> RDF.rest(~B<Bar>)
)
|> Graph.add( |> Graph.add(
~B<Bar> ~B<Bar>
|> RDF.first(2) |> RDF.first(2)
|> RDF.rest(~B<Foo>)) |> RDF.rest(~B<Foo>)
)
) == nil ) == nil
end end
end end
describe "from/1" do describe "from/1" do
test "an empty list", %{empty: empty} do test "an empty list", %{empty: empty} do
assert RDF.List.from([]) == empty assert RDF.List.from([]) == empty
@ -165,85 +185,99 @@ defmodule RDF.ListTest do
end end
%{ %{
"IRI" => iri(EX.Foo), "IRI" => iri(EX.Foo),
"blank node" => ~B<Foo>, "blank node" => ~B<Foo>,
"literal" => ~L<Foo>, "literal" => ~L<Foo>,
"string" => "Foo", "string" => "Foo",
"integer" => 42, "integer" => 42,
"float" => 3.14, "float" => 3.14,
"true" => true, "true" => true,
"false" => false, "false" => false,
"unresolved namespace-qualified name" => EX.Foo, "unresolved namespace-qualified name" => EX.Foo
} }
|> Enum.each(fn {type, element} -> |> Enum.each(fn {type, element} ->
@tag element: element @tag element: element
test "list with #{type} element", %{element: element} do test "list with #{type} element", %{element: element} do
with {bnode, graph_with_list} = one_element_list(element) do with {bnode, graph_with_list} = one_element_list(element) do
assert RDF.List.from([element], head: bnode) == assert RDF.List.from([element], head: bnode) ==
RDF.List.new(bnode, graph_with_list) RDF.List.new(bnode, graph_with_list)
end
end end
end) end
end)
test "nested list" do test "nested list" do
assert %RDF.List{head: bnode, graph: graph_with_list} = assert %RDF.List{head: bnode, graph: graph_with_list} = RDF.List.from([[1]])
RDF.List.from([[1]]) assert [nested] = get_in(graph_with_list, [bnode, RDF.first()])
assert [nested] = get_in(graph_with_list, [bnode, RDF.first]) assert get_in(graph_with_list, [bnode, RDF.rest()]) == [RDF.nil()]
assert get_in(graph_with_list, [bnode, RDF.rest]) == [RDF.nil] assert get_in(graph_with_list, [nested, RDF.first()]) == [XSD.integer(1)]
assert get_in(graph_with_list, [nested, RDF.first]) == [XSD.integer(1)] assert get_in(graph_with_list, [nested, RDF.rest()]) == [RDF.nil()]
assert get_in(graph_with_list, [nested, RDF.rest]) == [RDF.nil]
assert %RDF.List{head: bnode, graph: graph_with_list} = assert %RDF.List{head: bnode, graph: graph_with_list} =
RDF.List.from(["foo", [1, 2], "bar"]) RDF.List.from(["foo", [1, 2], "bar"])
assert get_in(graph_with_list, [bnode, RDF.first]) == [~L"foo"]
assert [second] = get_in(graph_with_list, [bnode, RDF.rest]) assert get_in(graph_with_list, [bnode, RDF.first()]) == [~L"foo"]
assert [nested] = get_in(graph_with_list, [second, RDF.first]) assert [second] = get_in(graph_with_list, [bnode, RDF.rest()])
assert get_in(graph_with_list, [nested, RDF.first]) == [XSD.integer(1)] assert [nested] = get_in(graph_with_list, [second, RDF.first()])
assert [nested_second] = get_in(graph_with_list, [nested, RDF.rest]) assert get_in(graph_with_list, [nested, RDF.first()]) == [XSD.integer(1)]
assert get_in(graph_with_list, [nested_second, RDF.first]) == [XSD.integer(2)] assert [nested_second] = get_in(graph_with_list, [nested, RDF.rest()])
assert get_in(graph_with_list, [nested_second, RDF.rest]) == [RDF.nil] assert get_in(graph_with_list, [nested_second, RDF.first()]) == [XSD.integer(2)]
assert [third] = get_in(graph_with_list, [second, RDF.rest]) assert get_in(graph_with_list, [nested_second, RDF.rest()]) == [RDF.nil()]
assert get_in(graph_with_list, [third, RDF.first]) == [~L"bar"] assert [third] = get_in(graph_with_list, [second, RDF.rest()])
assert get_in(graph_with_list, [third, RDF.rest]) == [RDF.nil] assert get_in(graph_with_list, [third, RDF.first()]) == [~L"bar"]
assert get_in(graph_with_list, [third, RDF.rest()]) == [RDF.nil()]
end end
%{ %{
"preserve order" => [3, 2, 1], "preserve order" => [3, 2, 1],
"different types" => [1, "foo", true, false, 3.14, EX.foo, EX.Foo, ~B<Foo>], "different types" => [1, "foo", true, false, 3.14, EX.foo(), EX.Foo, ~B<Foo>]
} }
|> Enum.each(fn {desc, list} -> |> Enum.each(fn {desc, list} ->
@tag list: list @tag list: list
test "list with multiple elements: #{desc}", %{list: list} do test "list with multiple elements: #{desc}", %{list: list} do
assert %RDF.List{head: bnode, graph: graph_with_list} = assert %RDF.List{head: bnode, graph: graph_with_list} = RDF.List.from(list)
RDF.List.from(list)
assert RDF.nil == assert RDF.nil() ==
Enum.reduce list, bnode, fn element, list_node -> Enum.reduce(list, bnode, fn element, list_node ->
case element do case element do
%IRI{} -> %IRI{} ->
assert get_in(graph_with_list, [list_node, RDF.first]) == [element] assert get_in(graph_with_list, [list_node, RDF.first()]) == [element]
%BlankNode{} ->
assert get_in(graph_with_list, [list_node, RDF.first]) == [element] %BlankNode{} ->
%Literal{} -> assert get_in(graph_with_list, [list_node, RDF.first()]) == [element]
assert get_in(graph_with_list, [list_node, RDF.first]) == [element]
element when is_boolean(element) -> %Literal{} ->
assert get_in(graph_with_list, [list_node, RDF.first]) == [RDF.Literal.new(element)] assert get_in(graph_with_list, [list_node, RDF.first()]) == [element]
element when is_atom(element) ->
assert get_in(graph_with_list, [list_node, RDF.first]) == [RDF.iri(element)] element when is_boolean(element) ->
_ -> assert get_in(graph_with_list, [list_node, RDF.first()]) == [
assert get_in(graph_with_list, [list_node, RDF.first]) == [RDF.Literal.new(element)] RDF.Literal.new(element)
end ]
[next] = get_in(graph_with_list, [list_node, RDF.rest])
unless next == RDF.nil do element when is_atom(element) ->
assert %BlankNode{} = next assert get_in(graph_with_list, [list_node, RDF.first()]) == [
end RDF.iri(element)
next ]
end
end _ ->
end) assert get_in(graph_with_list, [list_node, RDF.first()]) == [
RDF.Literal.new(element)
]
end
[next] = get_in(graph_with_list, [list_node, RDF.rest()])
unless next == RDF.nil() do
assert %BlankNode{} = next
end
next
end)
end
end)
test "an enumerable" do test "an enumerable" do
assert RDF.List.from(MapSet.new([42]), head: ~B<foo>) == assert RDF.List.from(MapSet.new([42]), head: ~B<foo>) ==
RDF.List.from([42], head: ~B<foo>) RDF.List.from([42], head: ~B<foo>)
end end
test "head option with unresolved namespace-qualified name" do test "head option with unresolved namespace-qualified name" do
@ -251,43 +285,42 @@ defmodule RDF.ListTest do
end end
end end
describe "values/1" do describe "values/1" do
test "the empty list", %{empty: empty} do test "the empty list", %{empty: empty} do
assert RDF.List.values(empty) == [] assert RDF.List.values(empty) == []
end end
test "list with one element", %{one: one} do test "list with one element", %{one: one} do
assert RDF.List.values(one) == [EX.element] assert RDF.List.values(one) == [EX.element()]
end end
test "list with multiple elements", %{abc: abc, ten: ten} do test "list with multiple elements", %{abc: abc, ten: ten} do
assert RDF.List.values(abc) == ~w[a b c] |> Enum.map(&Literal.new/1) assert RDF.List.values(abc) == ~w[a b c] |> Enum.map(&Literal.new/1)
assert RDF.List.values(ten) == 1..10 |> Enum.to_list |> Enum.map(&Literal.new/1) assert RDF.List.values(ten) == 1..10 |> Enum.to_list() |> Enum.map(&Literal.new/1)
end end
test "list with non-blank list nodes" do test "list with non-blank list nodes" do
assert RDF.List.from([EX.element], head: EX.Foo) assert RDF.List.from([EX.element()], head: EX.Foo)
|> RDF.List.values == [EX.element] |> RDF.List.values() == [EX.element()]
end end
test "nested list", %{nested: nested} do test "nested list", %{nested: nested} do
assert RDF.List.values(nested) == assert RDF.List.values(nested) ==
[~L"foo", [XSD.integer(1), XSD.integer(2)], ~L"bar"] [~L"foo", [XSD.integer(1), XSD.integer(2)], ~L"bar"]
assert RDF.list(["foo", [1, 2]]) |> RDF.List.values == assert RDF.list(["foo", [1, 2]]) |> RDF.List.values() ==
[~L"foo", [XSD.integer(1), XSD.integer(2)]] [~L"foo", [XSD.integer(1), XSD.integer(2)]]
assert RDF.list([[1, 2], "foo"]) |> RDF.List.values == assert RDF.list([[1, 2], "foo"]) |> RDF.List.values() ==
[[XSD.integer(1), XSD.integer(2)], ~L"foo"] [[XSD.integer(1), XSD.integer(2)], ~L"foo"]
inner_list = RDF.list([1, 2], head: ~B<inner>) inner_list = RDF.list([1, 2], head: ~B<inner>)
assert RDF.list(["foo", ~B<inner>], graph: inner_list.graph) assert RDF.list(["foo", ~B<inner>], graph: inner_list.graph)
|> RDF.List.values == [~L"foo", [XSD.integer(1), XSD.integer(2)]] |> RDF.List.values() == [~L"foo", [XSD.integer(1), XSD.integer(2)]]
end end
end end
describe "nodes/1" do describe "nodes/1" do
test "the empty list", %{empty: empty} do test "the empty list", %{empty: empty} do
assert RDF.List.nodes(empty) == [] assert RDF.List.nodes(empty) == []
@ -299,12 +332,12 @@ defmodule RDF.ListTest do
test "nested list", %{nested: nested} do test "nested list", %{nested: nested} do
assert RDF.list([[1, 2, 3]], head: ~B<outer>) assert RDF.list([[1, 2, 3]], head: ~B<outer>)
|> RDF.List.nodes == [~B<outer>] |> RDF.List.nodes() == [~B<outer>]
assert [~B<nested>, _, _] = RDF.List.nodes(nested) assert [~B<nested>, _, _] = RDF.List.nodes(nested)
end end
end end
describe "valid?/2" do describe "valid?/2" do
test "the empty list", %{empty: empty} do test "the empty list", %{empty: empty} do
assert RDF.List.valid?(empty) assert RDF.List.valid?(empty)
@ -324,25 +357,27 @@ defmodule RDF.ListTest do
end end
test "a non-blank list node is not valid" do test "a non-blank list node is not valid" do
assert RDF.list([EX.element], head: EX.Foo) |> RDF.List.valid? == false assert RDF.list([EX.element()], head: EX.Foo) |> RDF.List.valid?() == false
end end
test "a non-blank list node on later nodes makes the whole list invalid" do test "a non-blank list node on later nodes makes the whole list invalid" do
assert RDF.List.new(~B<Foo>, assert RDF.List.new(
~B<Foo>,
Graph.new( Graph.new(
~B<Foo> ~B<Foo>
|> RDF.first(1) |> RDF.first(1)
|> RDF.rest(EX.Foo)) |> RDF.rest(EX.Foo)
)
|> Graph.add( |> Graph.add(
EX.Foo EX.Foo
|> RDF.first(2) |> RDF.first(2)
|> RDF.rest(RDF.nil)) |> RDF.rest(RDF.nil())
)
) )
|> RDF.List.valid? == false |> RDF.List.valid?() == false
end end
end end
describe "node?" do describe "node?" do
test "the empty list", %{empty: empty} do test "the empty list", %{empty: empty} do
assert RDF.List.node?(empty.head, empty.graph) == true assert RDF.List.node?(empty.head, empty.graph) == true
@ -362,33 +397,34 @@ defmodule RDF.ListTest do
end end
test "unresolved namespace-qualified name" do test "unresolved namespace-qualified name" do
assert RDF.List.node?(EX.Foo, assert RDF.List.node?(
RDF.List.from([EX.element], head: EX.Foo).graph) == true EX.Foo,
RDF.List.from([EX.element()], head: EX.Foo).graph
) == true
end end
test "when given list node doesn't exist in the given graph" do test "when given list node doesn't exist in the given graph" do
assert RDF.List.node?(RDF.bnode, RDF.Graph.new) == false assert RDF.List.node?(RDF.bnode(), RDF.Graph.new()) == false
end end
test "literal" do test "literal" do
assert RDF.List.node?(~L"Foo", RDF.Graph.new) == false assert RDF.List.node?(~L"Foo", RDF.Graph.new()) == false
assert RDF.List.node?(42, RDF.Graph.new) == false assert RDF.List.node?(42, RDF.Graph.new()) == false
assert RDF.List.node?(true, RDF.Graph.new) == false assert RDF.List.node?(true, RDF.Graph.new()) == false
assert RDF.List.node?(false, RDF.Graph.new) == false assert RDF.List.node?(false, RDF.Graph.new()) == false
assert RDF.List.node?(nil, RDF.Graph.new) == false assert RDF.List.node?(nil, RDF.Graph.new()) == false
end end
test "non-list node" do test "non-list node" do
assert RDF.List.node?(EX.Foo, RDF.Graph.new({EX.Foo, EX.bar, EX.Baz})) == false assert RDF.List.node?(EX.Foo, RDF.Graph.new({EX.Foo, EX.bar(), EX.Baz})) == false
end end
test "incomplete list nodes" do test "incomplete list nodes" do
assert RDF.List.node?(EX.Foo, RDF.Graph.new({EX.Foo, RDF.first, EX.Baz})) == false assert RDF.List.node?(EX.Foo, RDF.Graph.new({EX.Foo, RDF.first(), EX.Baz})) == false
assert RDF.List.node?(EX.Foo, RDF.Graph.new({EX.Foo, RDF.rest, RDF.nil})) == false assert RDF.List.node?(EX.Foo, RDF.Graph.new({EX.Foo, RDF.rest(), RDF.nil()})) == false
end end
end end
describe "Enumerable.reduce" do describe "Enumerable.reduce" do
test "the empty list", %{empty: empty} do test "the empty list", %{empty: empty} do
assert Enum.reduce(empty, [], fn description, acc -> [description | acc] end) == [] assert Enum.reduce(empty, [], fn description, acc -> [description | acc] end) == []
@ -396,23 +432,19 @@ defmodule RDF.ListTest do
test "a valid list", %{one: one} do test "a valid list", %{one: one} do
assert [one.graph[one.head]] == assert [one.graph[one.head]] ==
Enum.reduce(one, [], fn description, acc -> [description | acc] end) Enum.reduce(one, [], fn description, acc -> [description | acc] end)
end end
end end
defp one_element_list(element), defp one_element_list(element),
do: one_element_list(element, RDF.bnode) do: one_element_list(element, RDF.bnode())
defp one_element_list(element, bnode) do defp one_element_list(element, bnode) do
{bnode, {bnode,
Graph.new( Graph.new(
bnode bnode
|> RDF.first(element) |> RDF.first(element)
|> RDF.rest(RDF.nil) |> RDF.rest(RDF.nil())
) )}
}
end end
end end
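Beyond the formatting noise, this test file doubles as a compact reference for the RDF.List API. A short usage sketch mirroring the assertions above (it assumes the :rdf dependency and the ~B blank-node sigil from RDF.Sigils, as imported in the tests):

    import RDF.Sigils

    # Build a list with an explicit blank-node head, then read it back.
    list = RDF.List.from(~w[a b c], head: ~B<abc>)

    RDF.List.values(list) == Enum.map(~w[a b c], &RDF.Literal.new/1)
    # => true, per the values/1 assertions above

    RDF.List.valid?(list)
    # => true, since every list node is a blank node and the chain ends in rdf:nil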

View File

@ -32,11 +32,12 @@ defmodule RDF.Literal.Datatype.RegistryTest do
gMonth gMonth
gMonthDay gMonthDay
] ]
|> Enum.map(fn xsd_datatype_name -> RDF.iri(NS.XSD.__base_iri__ <> xsd_datatype_name) end) |> Enum.map(fn xsd_datatype_name ->
RDF.iri(NS.XSD.__base_iri__() <> xsd_datatype_name)
end)
@supported_xsd_datatypes RDF.NS.XSD.__iris__() -- @unsupported_xsd_datatypes @supported_xsd_datatypes RDF.NS.XSD.__iris__() -- @unsupported_xsd_datatypes
describe "datatype/1" do describe "datatype/1" do
test "builtin datatypes" do test "builtin datatypes" do
Enum.each(Datatype.Registry.builtin_datatypes(), fn datatype -> Enum.each(Datatype.Registry.builtin_datatypes(), fn datatype ->
@ -60,7 +61,7 @@ defmodule RDF.Literal.Datatype.RegistryTest do
end end
test "with IRI of custom datatype" do test "with IRI of custom datatype" do
assert Age == Datatype.Registry.datatype(Age.id) assert Age == Datatype.Registry.datatype(Age.id())
end end
test "with namespace terms" do test "with namespace terms" do
@ -72,7 +73,9 @@ defmodule RDF.Literal.Datatype.RegistryTest do
assert XSD.Integer == Datatype.Registry.datatype(XSD.integer(42)) assert XSD.Integer == Datatype.Registry.datatype(XSD.integer(42))
assert XSD.Byte == Datatype.Registry.datatype(XSD.byte(42)) assert XSD.Byte == Datatype.Registry.datatype(XSD.byte(42))
assert RDF.LangString == Datatype.Registry.datatype(~L"foo"en) assert RDF.LangString == Datatype.Registry.datatype(~L"foo"en)
assert RDF.Literal.Generic == Datatype.Registry.datatype(RDF.literal("foo", datatype: "http://example.com"))
assert RDF.Literal.Generic ==
Datatype.Registry.datatype(RDF.literal("foo", datatype: "http://example.com"))
end end
end end
@ -100,7 +103,6 @@ defmodule RDF.Literal.Datatype.RegistryTest do
refute Datatype.Registry.xsd_datatype?(42) refute Datatype.Registry.xsd_datatype?(42)
end end
test "numeric_datatype?/1" do test "numeric_datatype?/1" do
assert Datatype.Registry.numeric_datatype?(XSD.integer(42)) assert Datatype.Registry.numeric_datatype?(XSD.integer(42))
assert Datatype.Registry.numeric_datatype?(XSD.byte(42)) assert Datatype.Registry.numeric_datatype?(XSD.byte(42))
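The registry tests reduce to a simple contract: given a literal or a datatype IRI, Datatype.Registry.datatype/1 returns the module implementing that datatype. A usage sketch reusing the aliases and sigil imports from the test module above (Datatype, XSD and ~L refer to the same things as in the assertions):

    Datatype.Registry.datatype(XSD.byte(42))
    # => XSD.Byte, the most specific datatype module

    Datatype.Registry.datatype(~L"foo"en)
    # => RDF.LangString

    Datatype.Registry.datatype(RDF.literal("foo", datatype: "http://example.com"))
    # => RDF.Literal.Generic, the fallback for unknown datatype IRIs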

View File

@ -12,29 +12,31 @@ defmodule RDF.LiteralTest do
alias RDF.NS alias RDF.NS
@examples %{ @examples %{
XSD.String => ["foo"], XSD.String => ["foo"],
XSD.Integer => [42], XSD.Integer => [42],
XSD.Double => [3.14], XSD.Double => [3.14],
XSD.Decimal => [Decimal.from_float(3.14)], XSD.Decimal => [Decimal.from_float(3.14)],
XSD.Boolean => [true, false], XSD.Boolean => [true, false]
} }
describe "new/1" do describe "new/1" do
Enum.each @examples, fn {datatype, example_values} -> Enum.each(@examples, fn {datatype, example_values} ->
@tag example: %{datatype: datatype, values: example_values} @tag example: %{datatype: datatype, values: example_values}
test "coercion from #{datatype |> Module.split |> List.last |> to_string}", %{example: example} do test "coercion from #{datatype |> Module.split() |> List.last() |> to_string}", %{
Enum.each example.values, fn example_value -> example: example
} do
Enum.each(example.values, fn example_value ->
assert Literal.new(example_value) == example.datatype.new(example_value) assert Literal.new(example_value) == example.datatype.new(example_value)
assert Literal.new!(example_value) == example.datatype.new!(example_value) assert Literal.new!(example_value) == example.datatype.new!(example_value)
end end)
end end
end end)
test "with builtin datatype literals" do test "with builtin datatype literals" do
Enum.each Datatype.Registry.builtin_datatypes(), fn datatype -> Enum.each(Datatype.Registry.builtin_datatypes(), fn datatype ->
datatype_literal = datatype.new("foo").literal datatype_literal = datatype.new("foo").literal
assert %Literal{literal: ^datatype_literal} = Literal.new(datatype_literal) assert %Literal{literal: ^datatype_literal} = Literal.new(datatype_literal)
end end)
end end
test "with custom datatype literals" do test "with custom datatype literals" do
@ -44,44 +46,45 @@ defmodule RDF.LiteralTest do
test "when options without datatype given" do test "when options without datatype given" do
assert Literal.new(true, []) == XSD.Boolean.new(true) assert Literal.new(true, []) == XSD.Boolean.new(true)
assert Literal.new(42, []) == XSD.Integer.new(42) assert Literal.new(42, []) == XSD.Integer.new(42)
assert Literal.new!(true, []) == XSD.Boolean.new!(true) assert Literal.new!(true, []) == XSD.Boolean.new!(true)
assert Literal.new!(42, []) == XSD.Integer.new!(42) assert Literal.new!(42, []) == XSD.Integer.new!(42)
end end
end end
describe "typed construction" do describe "typed construction" do
test "boolean" do test "boolean" do
assert Literal.new(true, datatype: NS.XSD.boolean) == XSD.Boolean.new(true) assert Literal.new(true, datatype: NS.XSD.boolean()) == XSD.Boolean.new(true)
assert Literal.new(false, datatype: NS.XSD.boolean) == XSD.Boolean.new(false) assert Literal.new(false, datatype: NS.XSD.boolean()) == XSD.Boolean.new(false)
assert Literal.new("true", datatype: NS.XSD.boolean) == XSD.Boolean.new("true") assert Literal.new("true", datatype: NS.XSD.boolean()) == XSD.Boolean.new("true")
assert Literal.new("false", datatype: NS.XSD.boolean) == XSD.Boolean.new("false") assert Literal.new("false", datatype: NS.XSD.boolean()) == XSD.Boolean.new("false")
end end
test "integer" do test "integer" do
assert Literal.new(42, datatype: NS.XSD.integer) == XSD.Integer.new(42) assert Literal.new(42, datatype: NS.XSD.integer()) == XSD.Integer.new(42)
assert Literal.new("42", datatype: NS.XSD.integer) == XSD.Integer.new("42") assert Literal.new("42", datatype: NS.XSD.integer()) == XSD.Integer.new("42")
end end
test "double" do test "double" do
assert Literal.new(3.14, datatype: NS.XSD.double) == XSD.Double.new(3.14) assert Literal.new(3.14, datatype: NS.XSD.double()) == XSD.Double.new(3.14)
assert Literal.new("3.14", datatype: NS.XSD.double) == XSD.Double.new("3.14") assert Literal.new("3.14", datatype: NS.XSD.double()) == XSD.Double.new("3.14")
end end
test "decimal" do test "decimal" do
assert Literal.new(3.14, datatype: NS.XSD.decimal) == XSD.Decimal.new(3.14) assert Literal.new(3.14, datatype: NS.XSD.decimal()) == XSD.Decimal.new(3.14)
assert Literal.new("3.14", datatype: NS.XSD.decimal) == XSD.Decimal.new("3.14") assert Literal.new("3.14", datatype: NS.XSD.decimal()) == XSD.Decimal.new("3.14")
assert Literal.new(Decimal.from_float(3.14), datatype: NS.XSD.decimal) ==
assert Literal.new(Decimal.from_float(3.14), datatype: NS.XSD.decimal()) ==
XSD.Decimal.new(Decimal.from_float(3.14)) XSD.Decimal.new(Decimal.from_float(3.14))
end end
test "unsignedInt" do test "unsignedInt" do
assert Literal.new(42, datatype: NS.XSD.unsignedInt) == XSD.UnsignedInt.new(42) assert Literal.new(42, datatype: NS.XSD.unsignedInt()) == XSD.UnsignedInt.new(42)
assert Literal.new("42", datatype: NS.XSD.unsignedInt) == XSD.UnsignedInt.new("42") assert Literal.new("42", datatype: NS.XSD.unsignedInt()) == XSD.UnsignedInt.new("42")
end end
test "string" do test "string" do
assert Literal.new("foo", datatype: NS.XSD.string) == XSD.String.new("foo") assert Literal.new("foo", datatype: NS.XSD.string()) == XSD.String.new("foo")
end end
test "registered custom datatype" do test "registered custom datatype" do
@ -106,17 +109,19 @@ defmodule RDF.LiteralTest do
end end
test "construction of an other than rdf:langString typed and language-tagged literal fails" do test "construction of an other than rdf:langString typed and language-tagged literal fails" do
assert Literal.new("Eule", datatype: RDF.langString, language: "de") == assert Literal.new("Eule", datatype: RDF.langString(), language: "de") ==
LangString.new("Eule", language: "de") LangString.new("Eule", language: "de")
assert_raise ArgumentError, fn -> assert_raise ArgumentError, fn ->
Literal.new("Eule", datatype: NS.XSD.string, language: "de") Literal.new("Eule", datatype: NS.XSD.string(), language: "de")
end end
end end
test "construction of a rdf:langString works, but results in an invalid literal" do test "construction of a rdf:langString works, but results in an invalid literal" do
assert Literal.new("Eule", datatype: RDF.langString) == LangString.new("Eule", []) assert Literal.new("Eule", datatype: RDF.langString()) == LangString.new("Eule", [])
assert_raise RDF.Literal.InvalidError, fn -> assert_raise RDF.Literal.InvalidError, fn ->
Literal.new!("Eule", datatype: RDF.langString) Literal.new!("Eule", datatype: RDF.langString())
end end
end end
end end
@ -197,7 +202,10 @@ defmodule RDF.LiteralTest do
assert ~L"foo"en |> Literal.is_a?(RDF.LangString) assert ~L"foo"en |> Literal.is_a?(RDF.LangString)
assert XSD.integer(42) |> Literal.is_a?(XSD.Integer) assert XSD.integer(42) |> Literal.is_a?(XSD.Integer)
assert XSD.byte(42) |> Literal.is_a?(XSD.Integer) assert XSD.byte(42) |> Literal.is_a?(XSD.Integer)
assert RDF.literal("foo", datatype: "http://example.com/dt") |> RDF.Literal.is_a?(RDF.Literal.Generic)
assert RDF.literal("foo", datatype: "http://example.com/dt")
|> RDF.Literal.is_a?(RDF.Literal.Generic)
refute XSD.float(3.14) |> Literal.is_a?(XSD.Integer) refute XSD.float(3.14) |> Literal.is_a?(XSD.Integer)
end end
@ -230,91 +238,94 @@ defmodule RDF.LiteralTest do
end end
describe "has_datatype?" do describe "has_datatype?" do
Enum.each literals(~W[all_simple all_plain_lang]a), fn literal -> Enum.each(literals(~W[all_simple all_plain_lang]a), fn literal ->
@tag literal: literal @tag literal: literal
test "#{inspect literal} has no datatype", %{literal: literal} do test "#{inspect(literal)} has no datatype", %{literal: literal} do
refute Literal.has_datatype?(literal) refute Literal.has_datatype?(literal)
end end
end end)
Enum.each literals(:all) -- literals(~W[all_simple all_plain_lang]a), fn literal -> Enum.each(literals(:all) -- literals(~W[all_simple all_plain_lang]a), fn literal ->
@tag literal: literal @tag literal: literal
test "Literal for #{inspect literal} has a datatype", %{literal: literal} do test "Literal for #{inspect(literal)} has a datatype", %{literal: literal} do
assert Literal.has_datatype?(literal) assert Literal.has_datatype?(literal)
end end
end end)
end end
describe "plain?" do describe "plain?" do
Enum.each literals(:all_plain), fn literal -> Enum.each(literals(:all_plain), fn literal ->
@tag literal: literal @tag literal: literal
test "#{inspect literal} is plain", %{literal: literal} do test "#{inspect(literal)} is plain", %{literal: literal} do
assert Literal.plain?(literal) assert Literal.plain?(literal)
end end
end end)
Enum.each literals(:all) -- literals(:all_plain), fn literal ->
Enum.each(literals(:all) -- literals(:all_plain), fn literal ->
@tag literal: literal @tag literal: literal
test "Literal for #{inspect literal} is not plain", %{literal: literal} do test "Literal for #{inspect(literal)} is not plain", %{literal: literal} do
refute Literal.plain?(literal) refute Literal.plain?(literal)
end end
end end)
end end
describe "simple?" do describe "simple?" do
Enum.each literals(:all_simple), fn literal -> Enum.each(literals(:all_simple), fn literal ->
@tag literal: literal @tag literal: literal
test "#{inspect literal} is simple", %{literal: literal} do test "#{inspect(literal)} is simple", %{literal: literal} do
assert Literal.simple?(literal) assert Literal.simple?(literal)
end end
end end)
Enum.each literals(:all) -- literals(:all_simple), fn literal ->
Enum.each(literals(:all) -- literals(:all_simple), fn literal ->
@tag literal: literal @tag literal: literal
test "Literal for #{inspect literal} is not simple", %{literal: literal} do test "Literal for #{inspect(literal)} is not simple", %{literal: literal} do
refute Literal.simple?(literal) refute Literal.simple?(literal)
end end
end end)
end end
describe "datatype_id/1" do describe "datatype_id/1" do
Enum.each literals(:all_simple), fn literal -> Enum.each(literals(:all_simple), fn literal ->
@tag literal: literal @tag literal: literal
test "simple literal #{inspect literal} has datatype xsd:string", %{literal: literal} do test "simple literal #{inspect(literal)} has datatype xsd:string", %{literal: literal} do
assert Literal.datatype_id(literal) == NS.XSD.string assert Literal.datatype_id(literal) == NS.XSD.string()
end end
end end)
%{ %{
123 => "integer", 123 => "integer",
true => "boolean", true => "boolean",
false => "boolean", false => "boolean",
9223372036854775807 => "integer", 9_223_372_036_854_775_807 => "integer",
3.1415 => "double", 3.1415 => "double",
~D[2017-04-13] => "date", ~D[2017-04-13] => "date",
~N[2017-04-14 15:32:07] => "dateTime", ~N[2017-04-14 15:32:07] => "dateTime",
~T[01:02:03] => "time" ~T[01:02:03] => "time"
} }
|> Enum.each(fn {value, type} -> |> Enum.each(fn {value, type} ->
@tag data: %{literal: literal = Literal.new(value), type: type} @tag data: %{literal: literal = Literal.new(value), type: type}
test "Literal for #{inspect literal} has datatype xsd:#{type}", test "Literal for #{inspect(literal)} has datatype xsd:#{type}",
%{data: %{literal: literal, type: type}} do %{data: %{literal: literal, type: type}} do
assert Literal.datatype_id(literal) == apply(NS.XSD, String.to_atom(type), []) assert Literal.datatype_id(literal) == apply(NS.XSD, String.to_atom(type), [])
end end
end) end)
end end
describe "language" do describe "language" do
Enum.each literals(:all_plain_lang), fn literal -> Enum.each(literals(:all_plain_lang), fn literal ->
@tag literal: literal @tag literal: literal
test "#{inspect literal} has correct language", %{literal: literal} do test "#{inspect(literal)} has correct language", %{literal: literal} do
assert Literal.language(literal) == "en" assert Literal.language(literal) == "en"
end end
end end)
Enum.each literals(:all) -- literals(:all_plain_lang), fn literal ->
Enum.each(literals(:all) -- literals(:all_plain_lang), fn literal ->
@tag literal: literal @tag literal: literal
test "Literal for #{inspect literal} has no language", %{literal: literal} do test "Literal for #{inspect(literal)} has no language", %{literal: literal} do
assert is_nil(Literal.language(literal)) assert is_nil(Literal.language(literal))
end end
end end)
test "with RDF.LangString literal" do test "with RDF.LangString literal" do
assert Literal.new("Upper", language: "en") |> Literal.language() == "en" assert Literal.new("Upper", language: "en") |> Literal.language() == "en"
@ -358,15 +369,17 @@ defmodule RDF.LiteralTest do
test "with XSD.Datatype literal" do test "with XSD.Datatype literal" do
[ [
XSD.String.new("foo"), XSD.String.new("foo"),
XSD.Byte.new(42), XSD.Byte.new(42)
] ]
|> Enum.each(fn |> Enum.each(fn
canonical_literal -> canonical_literal ->
assert Literal.canonical(canonical_literal) == canonical_literal assert Literal.canonical(canonical_literal) == canonical_literal
end) end)
assert XSD.Integer.new("042") |> Literal.canonical() == Literal.new(42) assert XSD.Integer.new("042") |> Literal.canonical() == Literal.new(42)
assert Literal.new(3.14) |> Literal.canonical() == Literal.new(3.14) |> XSD.Double.canonical()
assert Literal.new(3.14) |> Literal.canonical() ==
Literal.new(3.14) |> XSD.Double.canonical()
end end
test "with RDF.LangString literal" do test "with RDF.LangString literal" do
@ -425,16 +438,24 @@ defmodule RDF.LiteralTest do
end end
test "with RDF.LangString literal" do test "with RDF.LangString literal" do
assert Literal.equal_value?(Literal.new("foo", language: "en"), assert Literal.equal_value?(
Literal.new("foo", language: "en")) == true Literal.new("foo", language: "en"),
Literal.new("foo", language: "en")
) == true
assert Literal.equal_value?(Literal.new("foo", language: "en"), Literal.new("foo")) == nil assert Literal.equal_value?(Literal.new("foo", language: "en"), Literal.new("foo")) == nil
end end
test "with generic literal" do test "with generic literal" do
assert Literal.equal_value?(Literal.new("foo", datatype: "http://example.com/dt"), assert Literal.equal_value?(
Literal.new("foo", datatype: "http://example.com/dt")) == true Literal.new("foo", datatype: "http://example.com/dt"),
assert Literal.equal_value?(Literal.new("foo", datatype: "http://example.com/dt"), Literal.new("foo", datatype: "http://example.com/dt")
Literal.new("foo")) == nil ) == true
assert Literal.equal_value?(
Literal.new("foo", datatype: "http://example.com/dt"),
Literal.new("foo")
) == nil
end end
end end
@ -445,57 +466,62 @@ defmodule RDF.LiteralTest do
end end
test "with RDF.LangString literal" do test "with RDF.LangString literal" do
assert Literal.compare(Literal.new("foo", language: "en"), assert Literal.compare(
Literal.new("bar", language: "en")) == :gt Literal.new("foo", language: "en"),
Literal.new("bar", language: "en")
) == :gt
end end
test "with generic literal" do test "with generic literal" do
assert Literal.compare(Literal.new("foo", datatype: "http://example.com/dt"), assert Literal.compare(
Literal.new("bar", datatype: "http://example.com/dt")) == :gt Literal.new("foo", datatype: "http://example.com/dt"),
Literal.new("bar", datatype: "http://example.com/dt")
) == :gt
end end
end end
@poem XSD.String.new """ @poem XSD.String.new("""
<poem author="Wilhelm Busch"> <poem author="Wilhelm Busch">
Kaum hat dies der Hahn gesehen, Kaum hat dies der Hahn gesehen,
Fängt er auch schon an zu krähen: Fängt er auch schon an zu krähen:
Kikeriki! Kikikerikih!! Kikeriki! Kikikerikih!!
Tak, tak, tak! - da kommen sie. Tak, tak, tak! - da kommen sie.
</poem> </poem>
""" """)
describe "matches?" do describe "matches?" do
test "without flags" do test "without flags" do
[ [
{~L"abracadabra", ~L"bra", true}, {~L"abracadabra", ~L"bra", true},
{~L"abracadabra", ~L"^a.*a$", true}, {~L"abracadabra", ~L"^a.*a$", true},
{~L"abracadabra", ~L"^bra", false}, {~L"abracadabra", ~L"^bra", false},
{@poem, ~L"Kaum.*krähen", false}, {@poem, ~L"Kaum.*krähen", false},
{@poem, ~L"^Kaum.*gesehen,$", false}, {@poem, ~L"^Kaum.*gesehen,$", false},
{~L"foobar", ~L"foo$", false}, {~L"foobar", ~L"foo$", false},
{~L"noe\u0308l", ~L"noe\\u0308l", true}, {~L"noe\u0308l", ~L"noe\\u0308l", true},
{~L"noe\\u0308l", ~L"noe\\\\u0308l", true}, {~L"noe\\u0308l", ~L"noe\\\\u0308l", true},
{~L"\u{01D4B8}", ~L"\\U0001D4B8", true}, {~L"\u{01D4B8}", ~L"\\U0001D4B8", true},
{~L"\\U0001D4B8", ~L"\\\U0001D4B8", true}, {~L"\\U0001D4B8", ~L"\\\U0001D4B8", true},
{~L"abracadabra"en, ~L"bra", true}, {~L"abracadabra"en, ~L"bra", true},
{"abracadabra", "bra", true}, {"abracadabra", "bra", true},
{XSD.Integer.new("42"), ~L"4", true}, {XSD.Integer.new("42"), ~L"4", true},
{XSD.Integer.new("42"), ~L"en", false}, {XSD.Integer.new("42"), ~L"en", false}
] ]
|> Enum.each(fn {literal, pattern, expected_result} -> |> Enum.each(fn {literal, pattern, expected_result} ->
result = Literal.matches?(literal, pattern) result = Literal.matches?(literal, pattern)
assert result == expected_result, assert result == expected_result,
"expected RDF.Literal.matches?(#{inspect literal}, #{inspect pattern}) to return #{inspect expected_result}, but got #{result}" "expected RDF.Literal.matches?(#{inspect(literal)}, #{inspect(pattern)}) to return #{
inspect(expected_result)
}, but got #{result}"
end) end)
end end
test "with flags" do test "with flags" do
[ [
{@poem, ~L"Kaum.*krähen", ~L"s", true}, {@poem, ~L"Kaum.*krähen", ~L"s", true},
{@poem, ~L"^Kaum.*gesehen,$", ~L"m", true}, {@poem, ~L"^Kaum.*gesehen,$", ~L"m", true},
{@poem, ~L"kiki", ~L"i", true}, {@poem, ~L"kiki", ~L"i", true}
] ]
|> Enum.each(fn {literal, pattern, flags, result} -> |> Enum.each(fn {literal, pattern, flags, result} ->
assert Literal.matches?(literal, pattern, flags) == result assert Literal.matches?(literal, pattern, flags) == result
@ -504,13 +530,13 @@ defmodule RDF.LiteralTest do
test "with q flag" do test "with q flag" do
[ [
{~L"abcd", ~L".*", ~L"q", false}, {~L"abcd", ~L".*", ~L"q", false},
{~L"Mr. B. Obama", ~L"B. OBAMA", ~L"iq", true}, {~L"Mr. B. Obama", ~L"B. OBAMA", ~L"iq", true},
# If the q flag is used together with the m, s, or x flag, that flag has no effect. # If the q flag is used together with the m, s, or x flag, that flag has no effect.
{~L"abcd", ~L".*", ~L"mq", true}, {~L"abcd", ~L".*", ~L"mq", true},
{~L"abcd", ~L".*", ~L"qim", true}, {~L"abcd", ~L".*", ~L"qim", true},
{~L"abcd", ~L".*", ~L"xqm", true}, {~L"abcd", ~L".*", ~L"xqm", true}
] ]
|> Enum.each(fn {literal, pattern, flags, result} -> |> Enum.each(fn {literal, pattern, flags, result} ->
assert Literal.matches?(literal, pattern, flags) == result assert Literal.matches?(literal, pattern, flags) == result
@ -523,10 +549,13 @@ defmodule RDF.LiteralTest do
assert XSD.string("foo") assert XSD.string("foo")
|> Literal.update(fn s when is_binary(s) -> s <> "bar" end) == |> Literal.update(fn s when is_binary(s) -> s <> "bar" end) ==
XSD.string("foobar") XSD.string("foobar")
assert XSD.integer(1) |> Literal.update(fn i when is_integer(i) -> i + 1 end) == assert XSD.integer(1) |> Literal.update(fn i when is_integer(i) -> i + 1 end) ==
XSD.integer(2) XSD.integer(2)
assert XSD.byte(42) |> Literal.update(fn i when is_integer(i) -> i + 1 end) == assert XSD.byte(42) |> Literal.update(fn i when is_integer(i) -> i + 1 end) ==
XSD.byte(43) XSD.byte(43)
assert XSD.integer(1) assert XSD.integer(1)
|> Literal.update(fn i when is_integer(i) -> "0" <> to_string(i) end) == |> Literal.update(fn i when is_integer(i) -> "0" <> to_string(i) end) ==
XSD.integer("01") XSD.integer("01")
@ -546,7 +575,7 @@ defmodule RDF.LiteralTest do
test "with as: :lexical opt it passes the lexical form" do test "with as: :lexical opt it passes the lexical form" do
assert XSD.integer(1) assert XSD.integer(1)
|> Literal.update(fn i when is_binary(i) -> "0" <> i end, as: :lexical) == |> Literal.update(fn i when is_binary(i) -> "0" <> i end, as: :lexical) ==
XSD.integer("01") XSD.integer("01")
end end
end end
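Most of the diff in this file is line wrapping: once both arguments are spelled out, calls such as Literal.equal_value?/2 and Literal.compare/2 exceed the formatter's default line length of 98 columns, so their arguments are moved onto separate lines. A sketch of the same effect under default formatter options (mix format additionally honors the project's .formatter.exs, so the exact layout in the repo can differ slightly):

    source = """
    assert Literal.equal_value?(Literal.new("foo", language: "en"), Literal.new("foo", language: "en")) == true
    """

    source
    |> Code.format_string!()
    |> IO.iodata_to_binary()
    |> IO.puts()
    # The call no longer fits in 98 columns, so the formatter breaks the
    # argument list across several lines, much like the hunks above.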

View File

@ -2,5 +2,4 @@ defmodule RDF.NamespaceTest do
use ExUnit.Case use ExUnit.Case
doctest RDF.Namespace doctest RDF.Namespace
end end

View File

@ -7,148 +7,164 @@ defmodule RDF.NQuads.DecoderTest do
import RDF.Sigils import RDF.Sigils
use RDF.Vocabulary.Namespace use RDF.Vocabulary.Namespace
defvocab EX, defvocab EX, base_iri: "http://example.org/#", terms: [], strict: false
base_iri: "http://example.org/#",
terms: [], strict: false
defvocab P,
base_iri: "http://www.perceive.net/schemas/relationship/",
terms: [], strict: false
defvocab P, base_iri: "http://www.perceive.net/schemas/relationship/", terms: [], strict: false
test "an empty string is deserialized to an empty graph" do test "an empty string is deserialized to an empty graph" do
assert RDF.NQuads.Decoder.decode!("") == Dataset.new assert RDF.NQuads.Decoder.decode!("") == Dataset.new()
assert RDF.NQuads.Decoder.decode!(" \n\r\r\n ") == Dataset.new assert RDF.NQuads.Decoder.decode!(" \n\r\r\n ") == Dataset.new()
end end
test "decoding comments" do test "decoding comments" do
assert RDF.NQuads.Decoder.decode!("# just a comment") == Dataset.new assert RDF.NQuads.Decoder.decode!("# just a comment") == Dataset.new()
assert RDF.NQuads.Decoder.decode!(""" assert RDF.NQuads.Decoder.decode!("""
<http://example.org/#S> <http://example.org/#p> _:1 <http://example.org/#G>. # a comment <http://example.org/#S> <http://example.org/#p> _:1 <http://example.org/#G>. # a comment
""") == Dataset.new({EX.S, EX.p, RDF.bnode("1"), EX.G}) """) == Dataset.new({EX.S, EX.p(), RDF.bnode("1"), EX.G})
assert RDF.NQuads.Decoder.decode!(""" assert RDF.NQuads.Decoder.decode!("""
# a comment # a comment
<http://example.org/#S> <http://example.org/#p> <http://example.org/#O> <http://example.org/#G>. <http://example.org/#S> <http://example.org/#p> <http://example.org/#O> <http://example.org/#G>.
""") == Dataset.new({EX.S, EX.p, EX.O, EX.G}) """) == Dataset.new({EX.S, EX.p(), EX.O, EX.G})
assert RDF.NQuads.Decoder.decode!(""" assert RDF.NQuads.Decoder.decode!("""
<http://example.org/#S> <http://example.org/#p> <http://example.org/#O> <http://example.org/#G>. <http://example.org/#S> <http://example.org/#p> <http://example.org/#O> <http://example.org/#G>.
# a comment # a comment
""") == Dataset.new({EX.S, EX.p, EX.O, EX.G}) """) == Dataset.new({EX.S, EX.p(), EX.O, EX.G})
assert RDF.NQuads.Decoder.decode!(""" assert RDF.NQuads.Decoder.decode!("""
# Header line 1 # Header line 1
# Header line 2 # Header line 2
<http://example.org/#S1> <http://example.org/#p1> <http://example.org/#O1> <http://example.org/#G> . <http://example.org/#S1> <http://example.org/#p1> <http://example.org/#O1> <http://example.org/#G> .
# 1st comment # 1st comment
<http://example.org/#S1> <http://example.org/#p2> <http://example.org/#O2> . # 2nd comment <http://example.org/#S1> <http://example.org/#p2> <http://example.org/#O2> . # 2nd comment
# last comment # last comment
""") == Dataset.new([ """) ==
{EX.S1, EX.p1, EX.O1, EX.G}, Dataset.new([
{EX.S1, EX.p2, EX.O2}, {EX.S1, EX.p1(), EX.O1, EX.G},
]) {EX.S1, EX.p2(), EX.O2}
])
end end
test "empty lines" do test "empty lines" do
assert RDF.NQuads.Decoder.decode!(""" assert RDF.NQuads.Decoder.decode!("""
<http://example.org/#spiderman> <http://www.perceive.net/schemas/relationship/enemyOf> <http://example.org/#green_goblin> <http://example.org/graphs/spiderman> . <http://example.org/#spiderman> <http://www.perceive.net/schemas/relationship/enemyOf> <http://example.org/#green_goblin> <http://example.org/graphs/spiderman> .
""") == Dataset.new({EX.spiderman, P.enemyOf, EX.green_goblin, ~I<http://example.org/graphs/spiderman>}) """) ==
Dataset.new(
{EX.spiderman(), P.enemyOf(), EX.green_goblin(),
~I<http://example.org/graphs/spiderman>}
)
assert RDF.NQuads.Decoder.decode!(""" assert RDF.NQuads.Decoder.decode!("""
<http://example.org/#spiderman> <http://www.perceive.net/schemas/relationship/enemyOf> <http://example.org/#green_goblin> <http://example.org/graphs/spiderman> . <http://example.org/#spiderman> <http://www.perceive.net/schemas/relationship/enemyOf> <http://example.org/#green_goblin> <http://example.org/graphs/spiderman> .
""") == Dataset.new({EX.spiderman, P.enemyOf, EX.green_goblin, ~I<http://example.org/graphs/spiderman>}) """) ==
Dataset.new(
{EX.spiderman(), P.enemyOf(), EX.green_goblin(),
~I<http://example.org/graphs/spiderman>}
)
assert RDF.NQuads.Decoder.decode!(""" assert RDF.NQuads.Decoder.decode!("""
<http://example.org/#S1> <http://example.org/#p1> <http://example.org/#O1> . <http://example.org/#S1> <http://example.org/#p1> <http://example.org/#O1> .
<http://example.org/#S1> <http://example.org/#p2> <http://example.org/#O2> <http://example.org/#G> . <http://example.org/#S1> <http://example.org/#p2> <http://example.org/#O2> <http://example.org/#G> .
""") == Dataset.new([ """) ==
{EX.S1, EX.p1, EX.O1}, Dataset.new([
{EX.S1, EX.p2, EX.O2, EX.G}, {EX.S1, EX.p1(), EX.O1},
]) {EX.S1, EX.p2(), EX.O2, EX.G}
])
end end
test "decoding a single statement with iris" do test "decoding a single statement with iris" do
assert RDF.NQuads.Decoder.decode!(""" assert RDF.NQuads.Decoder.decode!("""
<http://example.org/#spiderman> <http://www.perceive.net/schemas/relationship/enemyOf> <http://example.org/#green_goblin> . <http://example.org/#spiderman> <http://www.perceive.net/schemas/relationship/enemyOf> <http://example.org/#green_goblin> .
""") == Dataset.new({EX.spiderman, P.enemyOf, EX.green_goblin}) """) == Dataset.new({EX.spiderman(), P.enemyOf(), EX.green_goblin()})
assert RDF.NQuads.Decoder.decode!(""" assert RDF.NQuads.Decoder.decode!("""
<http://example.org/#spiderman> <http://www.perceive.net/schemas/relationship/enemyOf> <http://example.org/#green_goblin> <http://example.org/graphs/spiderman>. <http://example.org/#spiderman> <http://www.perceive.net/schemas/relationship/enemyOf> <http://example.org/#green_goblin> <http://example.org/graphs/spiderman>.
""") == Dataset.new({EX.spiderman, P.enemyOf, EX.green_goblin, ~I<http://example.org/graphs/spiderman>}) """) ==
Dataset.new(
{EX.spiderman(), P.enemyOf(), EX.green_goblin(),
~I<http://example.org/graphs/spiderman>}
)
end end
test "decoding a single statement with a blank node" do test "decoding a single statement with a blank node" do
assert RDF.NQuads.Decoder.decode!(""" assert RDF.NQuads.Decoder.decode!("""
_:foo <http://example.org/#p> <http://example.org/#O> <http://example.org/#G> . _:foo <http://example.org/#p> <http://example.org/#O> <http://example.org/#G> .
""") == Dataset.new({RDF.bnode("foo"), EX.p, EX.O, EX.G}) """) == Dataset.new({RDF.bnode("foo"), EX.p(), EX.O, EX.G})
assert RDF.NQuads.Decoder.decode!(""" assert RDF.NQuads.Decoder.decode!("""
<http://example.org/#S> <http://example.org/#p> _:1 <http://example.org/#G> . <http://example.org/#S> <http://example.org/#p> _:1 <http://example.org/#G> .
""") == Dataset.new({EX.S, EX.p, RDF.bnode("1"), EX.G}) """) == Dataset.new({EX.S, EX.p(), RDF.bnode("1"), EX.G})
assert RDF.NQuads.Decoder.decode!(""" assert RDF.NQuads.Decoder.decode!("""
_:foo <http://example.org/#p> _:bar <http://example.org/#G> . _:foo <http://example.org/#p> _:bar <http://example.org/#G> .
""") == Dataset.new({RDF.bnode("foo"), EX.p, RDF.bnode("bar"), EX.G}) """) == Dataset.new({RDF.bnode("foo"), EX.p(), RDF.bnode("bar"), EX.G})
assert RDF.NQuads.Decoder.decode!(""" assert RDF.NQuads.Decoder.decode!("""
<http://example.org/#S> <http://example.org/#p> _:1 _:G . <http://example.org/#S> <http://example.org/#p> _:1 _:G .
""") == Dataset.new({EX.S, EX.p, RDF.bnode("1"), RDF.bnode("G")}) """) == Dataset.new({EX.S, EX.p(), RDF.bnode("1"), RDF.bnode("G")})
end end
test "decoding a single statement with an untyped string literal" do test "decoding a single statement with an untyped string literal" do
assert RDF.NQuads.Decoder.decode!(""" assert RDF.NQuads.Decoder.decode!("""
<http://example.org/#spiderman> <http://www.perceive.net/schemas/relationship/realname> "Peter Parker" <http://example.org/#G> . <http://example.org/#spiderman> <http://www.perceive.net/schemas/relationship/realname> "Peter Parker" <http://example.org/#G> .
""") == Dataset.new({EX.spiderman, P.realname, RDF.literal("Peter Parker"), EX.G}) """) == Dataset.new({EX.spiderman(), P.realname(), RDF.literal("Peter Parker"), EX.G})
assert RDF.NQuads.Decoder.decode!(""" assert RDF.NQuads.Decoder.decode!("""
<http://example.org/#spiderman> <http://www.perceive.net/schemas/relationship/realname> "Peter Parker" . <http://example.org/#spiderman> <http://www.perceive.net/schemas/relationship/realname> "Peter Parker" .
""") == Dataset.new({EX.spiderman, P.realname, RDF.literal("Peter Parker")}) """) == Dataset.new({EX.spiderman(), P.realname(), RDF.literal("Peter Parker")})
end end
test "decoding a single statement with a typed literal" do test "decoding a single statement with a typed literal" do
assert RDF.NQuads.Decoder.decode!(""" assert RDF.NQuads.Decoder.decode!("""
<http://example.org/#spiderman> <http://example.org/#p> "42"^^<http://www.w3.org/2001/XMLSchema#integer> <http://example.org/#G> . <http://example.org/#spiderman> <http://example.org/#p> "42"^^<http://www.w3.org/2001/XMLSchema#integer> <http://example.org/#G> .
""") == Dataset.new({EX.spiderman, EX.p, RDF.literal(42), EX.G}) """) == Dataset.new({EX.spiderman(), EX.p(), RDF.literal(42), EX.G})
assert RDF.NQuads.Decoder.decode!(""" assert RDF.NQuads.Decoder.decode!("""
<http://example.org/#spiderman> <http://example.org/#p> "42"^^<http://www.w3.org/2001/XMLSchema#integer> . <http://example.org/#spiderman> <http://example.org/#p> "42"^^<http://www.w3.org/2001/XMLSchema#integer> .
""") == Dataset.new({EX.spiderman, EX.p, RDF.literal(42)}) """) == Dataset.new({EX.spiderman(), EX.p(), RDF.literal(42)})
end end
test "decoding a single statement with a language tagged literal" do test "decoding a single statement with a language tagged literal" do
assert RDF.NQuads.Decoder.decode!(""" assert RDF.NQuads.Decoder.decode!("""
<http://example.org/#S> <http://example.org/#p> "foo"@en <http://example.org/#G> . <http://example.org/#S> <http://example.org/#p> "foo"@en <http://example.org/#G> .
""") == Dataset.new({EX.S, EX.p, RDF.literal("foo", language: "en"), EX.G}) """) == Dataset.new({EX.S, EX.p(), RDF.literal("foo", language: "en"), EX.G})
assert RDF.NQuads.Decoder.decode!(""" assert RDF.NQuads.Decoder.decode!("""
<http://example.org/#S> <http://example.org/#p> "foo"@en . <http://example.org/#S> <http://example.org/#p> "foo"@en .
""") == Dataset.new({EX.S, EX.p, RDF.literal("foo", language: "en")}) """) == Dataset.new({EX.S, EX.p(), RDF.literal("foo", language: "en")})
end end
test "decoding multiple statements" do test "decoding multiple statements" do
assert RDF.NQuads.Decoder.decode!(""" assert RDF.NQuads.Decoder.decode!("""
<http://example.org/#S1> <http://example.org/#p1> <http://example.org/#O1> <http://example.org/#G> . <http://example.org/#S1> <http://example.org/#p1> <http://example.org/#O1> <http://example.org/#G> .
<http://example.org/#S1> <http://example.org/#p2> <http://example.org/#O2> <http://example.org/#G> . <http://example.org/#S1> <http://example.org/#p2> <http://example.org/#O2> <http://example.org/#G> .
""") == Dataset.new([ """) ==
{EX.S1, EX.p1, EX.O1, EX.G}, Dataset.new([
{EX.S1, EX.p2, EX.O2, EX.G}, {EX.S1, EX.p1(), EX.O1, EX.G},
]) {EX.S1, EX.p2(), EX.O2, EX.G}
assert RDF.NQuads.Decoder.decode!(""" ])
<http://example.org/#S1> <http://example.org/#p1> <http://example.org/#O1> <http://example.org/#G> .
<http://example.org/#S1> <http://example.org/#p2> <http://example.org/#O2> <http://example.org/#G> .
<http://example.org/#S2> <http://example.org/#p3> <http://example.org/#O3> <http://example.org/#G> .
<http://example.org/#S2> <http://example.org/#p3> <http://example.org/#O3> .
""") == Dataset.new([
{EX.S1, EX.p1, EX.O1, EX.G},
{EX.S1, EX.p2, EX.O2, EX.G},
{EX.S2, EX.p3, EX.O3, EX.G},
{EX.S2, EX.p3, EX.O3}
])
end
assert RDF.NQuads.Decoder.decode!("""
<http://example.org/#S1> <http://example.org/#p1> <http://example.org/#O1> <http://example.org/#G> .
<http://example.org/#S1> <http://example.org/#p2> <http://example.org/#O2> <http://example.org/#G> .
<http://example.org/#S2> <http://example.org/#p3> <http://example.org/#O3> <http://example.org/#G> .
<http://example.org/#S2> <http://example.org/#p3> <http://example.org/#O3> .
""") ==
Dataset.new([
{EX.S1, EX.p1(), EX.O1, EX.G},
{EX.S1, EX.p2(), EX.O2, EX.G},
{EX.S2, EX.p3(), EX.O3, EX.G},
{EX.S2, EX.p3(), EX.O3}
])
end
end end
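The decoder tests above all reduce to one pattern: each N-Quads line becomes a single statement, and the optional fourth term names the graph the triple lands in. A minimal usage sketch, assuming only the RDF.NQuads.Decoder.decode!/1, RDF.Dataset.new/1, RDF.literal and ~I sigil calls already used in these tests:

import RDF.Sigils
alias RDF.Dataset

# the first statement carries a graph name, the second goes into the default graph
dataset =
  RDF.NQuads.Decoder.decode!("""
  <http://example.org/#S> <http://example.org/#p> "foo"@en <http://example.org/#G> .
  <http://example.org/#S> <http://example.org/#p> "42"^^<http://www.w3.org/2001/XMLSchema#integer> .
  """)

dataset ==
  Dataset.new([
    {~I<http://example.org/#S>, ~I<http://example.org/#p>,
     RDF.literal("foo", language: "en"), ~I<http://example.org/#G>},
    {~I<http://example.org/#S>, ~I<http://example.org/#p>, RDF.literal(42)}
  ])
# => true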

Some files were not shown because too many files have changed in this diff.