json_ld: RDF to JSON-LD serialization
This commit is contained in:
parent
be144fbe68
commit
0f517d4195
5 changed files with 819 additions and 142 deletions
319
lib/json/ld/encoder.ex
Normal file
319
lib/json/ld/encoder.ex
Normal file
|
@ -0,0 +1,319 @@
|
|||
defmodule JSON.LD.Encoder do
  @moduledoc """
  An encoder for JSON-LD serializations of RDF data structures.

  Implements the RDF-to-JSON-LD direction ("Serialize RDF as JSON-LD
  Algorithm") of the JSON-LD 1.0 Processing Algorithms and API spec.
  """

  use RDF.Serialization.Encoder

  import JSON.LD.Utils

  alias RDF.{Dataset, Graph, BlankNode, Literal}
  alias RDF.NS.{XSD}

  # Frequently compared RDF vocabulary terms, pre-rendered to strings once
  # at compile time so the serialization loops can compare plain binaries.
  @rdf_type to_string(RDF.NS.RDF.type)
  @rdf_nil to_string(RDF.NS.RDF.nil)
  @rdf_first to_string(RDF.NS.RDF.first)
  @rdf_rest to_string(RDF.NS.RDF.rest)
  @rdf_list to_string(RDF.uri(RDF.NS.RDF.List))
def encode(data, opts \\ []) do
|
||||
with {:ok, json_ld_object} <- from_rdf(data, opts) do
|
||||
encode_json(json_ld_object)
|
||||
end
|
||||
end
|
||||
|
||||
def encode!(data, opts \\ []) do
|
||||
data
|
||||
|> from_rdf!(opts)
|
||||
|> encode_json!
|
||||
end
|
||||
|
||||
def from_rdf(dataset, options \\ %JSON.LD.Options{}) do
|
||||
try do
|
||||
{:ok, from_rdf!(dataset, options)}
|
||||
rescue
|
||||
exception -> {:error, Exception.message(exception)}
|
||||
end
|
||||
end
|
||||
|
||||
def from_rdf!(dataset, options \\ %JSON.LD.Options{}) do
|
||||
with options = JSON.LD.Options.new(options) do
|
||||
graph_map =
|
||||
Enum.reduce RDF.Dataset.graphs(dataset), %{},
|
||||
fn ({name, graph}, graph_map) ->
|
||||
# 3.1)
|
||||
name = to_string(name || "@default")
|
||||
|
||||
# 3.3)
|
||||
graph_map =
|
||||
if graph.name && !get_in(graph_map, ["@default", name]) do
|
||||
Map.update graph_map, "@default", %{name => %{"@id" => name}},
|
||||
fn default_graph ->
|
||||
Map.put(default_graph, name, %{"@id" => name})
|
||||
end
|
||||
else
|
||||
graph_map
|
||||
end
|
||||
|
||||
# 3.2 + 3.4)
|
||||
Map.put(graph_map, name,
|
||||
node_map_from_graph(graph, Map.get(graph_map, name, %{}),
|
||||
options.use_native_types, options.use_rdf_type))
|
||||
end
|
||||
|
||||
# 4)
|
||||
graph_map =
|
||||
Enum.reduce graph_map, %{}, fn ({name, graph_object}, graph_map) ->
|
||||
Map.put(graph_map, name, convert_list(graph_object))
|
||||
end
|
||||
|
||||
# 5+6)
|
||||
Map.get(graph_map, "@default", %{})
|
||||
|> Enum.sort_by(fn {subject, _} -> subject end)
|
||||
|> Enum.reduce([], fn ({subject, node}, result) ->
|
||||
# 6.1)
|
||||
node =
|
||||
if Map.has_key?(graph_map, subject) do
|
||||
Map.put node, "@graph",
|
||||
graph_map[subject]
|
||||
|> Enum.sort_by(fn {s, _} -> s end)
|
||||
|> Enum.reduce([], fn ({s, n}, graph_nodes) ->
|
||||
n = Map.delete(n, "usages")
|
||||
if Map.size(n) == 1 and Map.has_key?(n, "@id") do
|
||||
graph_nodes
|
||||
else
|
||||
[n | graph_nodes]
|
||||
end
|
||||
end)
|
||||
|> Enum.reverse
|
||||
else
|
||||
node
|
||||
end
|
||||
|
||||
# 6.2)
|
||||
node = Map.delete(node, "usages")
|
||||
if Map.size(node) == 1 and Map.has_key?(node, "@id") do
|
||||
result
|
||||
else
|
||||
[node | result]
|
||||
end
|
||||
end)
|
||||
|> Enum.reverse
|
||||
end
|
||||
end
|
||||
|
||||
# 3.5)
|
||||
defp node_map_from_graph(graph, current, use_native_types, use_rdf_type) do
|
||||
Enum.reduce(graph, current, fn ({subject, predicate, object}, node_map) ->
|
||||
{subject, predicate, node_object} =
|
||||
{to_string(subject), to_string(predicate), nil}
|
||||
node = Map.get(node_map, subject, %{"@id" => subject})
|
||||
if is_node_object = (match?(%URI{}, object) || match?(%BlankNode{}, object)) do
|
||||
node_object = to_string(object)
|
||||
node_map = Map.put_new(node_map, node_object, %{"@id" => node_object})
|
||||
end
|
||||
node =
|
||||
if is_node_object and !use_rdf_type and predicate == @rdf_type do
|
||||
Map.update(node, "@type", [node_object], fn types ->
|
||||
if node_object in types do
|
||||
types
|
||||
else
|
||||
types ++ [node_object]
|
||||
end
|
||||
end)
|
||||
else
|
||||
value = rdf_to_object(object, use_native_types)
|
||||
node =
|
||||
Map.update(node, predicate, [value], fn objects ->
|
||||
if value in objects do
|
||||
objects
|
||||
else
|
||||
objects ++ [value]
|
||||
end
|
||||
end)
|
||||
if is_node_object do
|
||||
usage = %{
|
||||
"node" => node,
|
||||
"property" => predicate,
|
||||
"value" => value,
|
||||
}
|
||||
node_map =
|
||||
Map.update(node_map, node_object, %{"usages" => [usage]}, fn object_node ->
|
||||
Map.update(object_node, "usages", [usage], fn usages ->
|
||||
usages ++ [usage]
|
||||
end)
|
||||
end)
|
||||
end
|
||||
node
|
||||
end
|
||||
Map.put(node_map, subject, node)
|
||||
end)
|
||||
|> update_node_usages
|
||||
end
|
||||
|
||||
# This function is necessary because we have no references and must update the
|
||||
# node member of the usage maps with later enhanced usages
|
||||
defp update_node_usages(node_map) do
|
||||
Enum.reduce node_map, node_map, fn
|
||||
({subject, %{"usages" => usages} = node}, node_map) ->
|
||||
update_in node_map, [subject, "usages"], fn usages ->
|
||||
Enum.map usages, fn usage ->
|
||||
Map.update! usage, "node", fn %{"@id" => subject} ->
|
||||
node_map[subject]
|
||||
end
|
||||
end
|
||||
end
|
||||
(_, node_map) -> node_map
|
||||
end
|
||||
end
|
||||
|
||||
# This function is necessary because we have no references and use this
|
||||
# instead to update the head by path
|
||||
defp update_head(graph_object, path, old, new) do
|
||||
update_in graph_object, path, fn objects ->
|
||||
Enum.map objects, fn
|
||||
^old -> new
|
||||
current -> current
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# 4)
|
||||
defp convert_list(%{@rdf_nil => nil_node} = graph_object) do
|
||||
Enum.reduce nil_node["usages"], graph_object,
|
||||
# 4.3.1)
|
||||
fn (usage, graph_object) ->
|
||||
# 4.3.2) + 4.3.3)
|
||||
{list, list_nodes, [subject, property] = head_path, head} =
|
||||
extract_list(usage)
|
||||
|
||||
# 4.3.4)
|
||||
skip =
|
||||
if property == @rdf_first do
|
||||
# 4.3.4.1)
|
||||
if subject == @rdf_nil do
|
||||
true
|
||||
else
|
||||
# 4.3.4.3-5)
|
||||
head_path = [head["@id"], @rdf_rest]
|
||||
head = List.first(graph_object[head["@id"]][@rdf_rest])
|
||||
# 4.3.4.6)
|
||||
[_ | list] = list
|
||||
[_ | list_nodes] = list_nodes
|
||||
false
|
||||
end
|
||||
else
|
||||
false
|
||||
end
|
||||
if skip do
|
||||
graph_object
|
||||
else
|
||||
graph_object =
|
||||
update_head graph_object, head_path, head,
|
||||
head
|
||||
# 4.3.5)
|
||||
|> Map.delete("@id")
|
||||
# 4.3.6) isn't necessary, since we built the list in reverse order
|
||||
# 4.3.7)
|
||||
|> Map.put("@list", list)
|
||||
|
||||
# 4.3.8)
|
||||
Enum.reduce(list_nodes, graph_object, fn (node_id, graph_object) ->
|
||||
Map.delete(graph_object, node_id)
|
||||
end)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
defp convert_list(graph_object), do: graph_object
|
||||
|
||||
|
||||
  # 4.3.3) Walks a list from its tail (an rdf:nil usage) back towards its
  # head, accumulating the list values and the ids of the traversed blank
  # nodes. Returns `{list, list_nodes, [subject, property], head}` once a
  # node is reached that is not a well-formed list node.
  defp extract_list(usage, list \\ [], list_nodes \\ [])

  # Well-formed intermediate list node: a blank node with exactly one usage,
  # one rdf:first and one rdf:rest (and nothing else) — keep walking.
  defp extract_list(
        %{"node" => %{
          # Spec FIXME: no mention of @id
          "@id" => id = ("_:" <> _), # contrary to spec we assume/require this to be even on the initial call to be a blank node
          "usages" => [usage],
          @rdf_first => [first],
          @rdf_rest => [_rest],
        } = node,
        "property" => @rdf_rest}, list, list_nodes) when map_size(node) == 4 do
    extract_list(usage, [first | list], [id | list_nodes])
  end

  # Same as above, but the node is additionally typed as rdf:List.
  defp extract_list(
        %{"node" => %{
          # Spec FIXME: no mention of @id
          "@id" => id = ("_:" <> _), # contrary to spec we assume/require this to be even on the initial call to be a blank node
          "@type" => [@rdf_list],
          "usages" => [usage],
          @rdf_first => [first],
          @rdf_rest => [_rest],
        } = node,
        "property" => @rdf_rest}, list, list_nodes) when map_size(node) == 5 do
    extract_list(usage, [first | list], [id | list_nodes])
  end

  # Head reached: the referencing node is not a well-formed list node.
  defp extract_list(%{"node" => %{"@id" => subject}, "property" => property, "value" => head},
                    list, list_nodes),
    do: {list, list_nodes, [subject, property], head}
defp rdf_to_object(%URI{} = uri, _use_native_types) do
|
||||
%{"@id" => to_string(uri)}
|
||||
end
|
||||
|
||||
defp rdf_to_object(%BlankNode{} = bnode, _use_native_types) do
|
||||
%{"@id" => to_string(bnode)}
|
||||
end
|
||||
|
||||
defp rdf_to_object(%Literal{value: value, datatype: datatype} = literal, use_native_types) do
|
||||
result = %{}
|
||||
converted_value = literal
|
||||
type = nil
|
||||
if use_native_types do
|
||||
cond do
|
||||
datatype == XSD.string ->
|
||||
converted_value = value
|
||||
datatype == XSD.boolean ->
|
||||
if RDF.Boolean.valid?(literal) do
|
||||
converted_value = value
|
||||
else
|
||||
type = XSD.boolean
|
||||
end
|
||||
datatype in [XSD.integer, XSD.double] ->
|
||||
if RDF.Literal.valid?(literal) do
|
||||
converted_value = value
|
||||
end
|
||||
true ->
|
||||
type = datatype
|
||||
end
|
||||
else
|
||||
cond do
|
||||
datatype == RDF.langString ->
|
||||
result = Map.put(result, "@language", literal.language)
|
||||
datatype == XSD.string ->
|
||||
nil # no-op
|
||||
true ->
|
||||
type = datatype
|
||||
end
|
||||
end
|
||||
|
||||
result = type && Map.put(result, "@type", to_string(type)) || result
|
||||
Map.put(result, "@value",
|
||||
match?(%Literal{}, converted_value) && Literal.lexical(converted_value) || converted_value)
|
||||
end
|
||||
|
||||
|
||||
# TODO: This should not be dependent on Poison as a JSON encoder in general,
|
||||
# but determine available JSON encoders and use one heuristically or by configuration
|
||||
defp encode_json(value, opts \\ []) do
|
||||
Poison.encode(value)
|
||||
end
|
||||
|
||||
defp encode_json!(value, opts \\ []) do
|
||||
Poison.encode!(value)
|
||||
end
|
||||
|
||||
end
|
|
@ -10,6 +10,8 @@ defmodule JSON.LD.Options do
|
|||
document_loader: nil,
|
||||
expand_context: nil,
|
||||
produce_generalized_rdf: false,
|
||||
use_rdf_type: false,
|
||||
use_native_types: false,
|
||||
processing_mode: "json-ld-1.0"
|
||||
|
||||
def new(), do: %JSON.LD.Options{}
|
||||
|
|
54
test/suite/from_rdf_test.exs
Normal file
54
test/suite/from_rdf_test.exs
Normal file
|
@ -0,0 +1,54 @@
|
|||
defmodule JSON.LD.TestSuite.FromRdfTest do
  # Runs the "fromRdf" test cases of the official JSON-LD test suite
  # against JSON.LD.Encoder.from_rdf!/2.
  use ExUnit.Case, async: false

  import JSON.LD.TestSuite

  setup_all do
    [base_iri: manifest("fromRdf")["baseIri"]]
  end

  # One ExUnit test is generated per suite test case; the commented filter
  # pipelines below can be re-enabled to narrow execution to single cases.
  test_cases("fromRdf")
  # TODO: https://github.com/json-ld/json-ld.org/issues/357
  # |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0020] end)
  # |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0021] end)
  # TODO: Ordering problems
  # |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0001] end)
  # |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0002] end)
  # |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0017] end)
  # |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0018] end)
  # |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0019] end)
  |> Enum.each(fn %{"name" => name, "input" => input} = test_case ->
    if input in ~w[fromRdf-0001-in.nq fromRdf-0002-in.nq fromRdf-0017-in.nq fromRdf-0018-in.nq fromRdf-0019-in.nq] do
      @tag skip: """
        The values are correct, but the order not, because Elixirs maps with the input graphs have no order.
        So, fixing that would require a different representation of graphs in general.
        """
    end
    if input in ~w[fromRdf-0020-in.nq fromRdf-0021-in.nq] do
      @tag skip: "https://github.com/json-ld/json-ld.org/issues/357"
    end
    @tag :test_suite
    @tag :from_rdf_test_suite
    @tag data: test_case
    test "#{input}: #{name}",
        %{data: %{"input" => input, "expect" => output} = test_case, base_iri: base_iri} do
      assert serialize(input, test_case_options(test_case, base_iri)) == json(output)
    end
  end)

  # Reads an N-Quads input file of the suite and serializes it with the
  # encoder under test.
  def serialize(filename, options) do
    filename
    |> file
    |> RDF.NQuads.read_file!
    |> JSON.LD.Encoder.from_rdf!(options)
  end

  # Reads and parses the expected JSON output file of the suite.
  def json(filename) do
    filename
    |> file
    |> File.read!
    |> Poison.Parser.parse!
  end
end
|
437
test/unit/encoder_test.exs
Normal file
437
test/unit/encoder_test.exs
Normal file
|
@ -0,0 +1,437 @@
|
|||
defmodule JSON.LD.EncoderTest do
  # Unit tests for the RDF-to-JSON-LD serialization of JSON.LD.Encoder.
  use ExUnit.Case, async: false

  doctest JSON.LD.Encoder

  alias RDF.{Dataset}
  alias RDF.NS
  alias RDF.NS.{XSD, RDFS}

  import RDF.Sigils

  # Ad-hoc vocabulary namespaces used to build test statements tersely.
  defmodule TestNS do
    use RDF.Vocabulary.Namespace
    defvocab EX, base_uri: "http://example.com/", terms: [], strict: false
    defvocab S, base_uri: "http://schema.org/", terms: [], strict: false
  end

  alias TestNS.{EX, S}

  # Asserts that `input`, loaded into each of the RDF data structures given
  # via the :only option (defaults to [Dataset]), serializes to `output`.
  # The opts are also forwarded to from_rdf!/2.
  def gets_serialized_to(input, output, opts \\ []) do
    data_structs = Keyword.get(opts, :only, [Dataset])
    Enum.each data_structs, fn data_struct ->
      assert JSON.LD.Encoder.from_rdf!(data_struct.new(input), opts) == output
    end
  end

  test "an empty RDF.Dataset is serialized to an JSON array string" do
    assert JSON.LD.Encoder.encode!(Dataset.new) == "[]"
  end

  describe "simple tests" do
    test "One subject IRI object" do
      {~I<http://a/b>, ~I<http://a/c>, ~I<http://a/d>}
      |> gets_serialized_to([%{
        "@id" => "http://a/b",
        "http://a/c" => [%{"@id" => "http://a/d"}]
      }])
    end

    test "should generate object list" do
      [{EX.b, EX.c, EX.d}, {EX.b, EX.c, EX.e}]
      |> gets_serialized_to([%{
        "@id" => "http://example.com/b",
        "http://example.com/c" => [
          %{"@id" => "http://example.com/d"},
          %{"@id" => "http://example.com/e"}
        ]
      }])
    end

    test "should generate property list" do
      [{EX.b, EX.c, EX.d}, {EX.b, EX.e, EX.f}]
      |> gets_serialized_to([%{
        "@id" => "http://example.com/b",
        "http://example.com/c" => [%{"@id" => "http://example.com/d"}],
        "http://example.com/e" => [%{"@id" => "http://example.com/f"}]
      }])
    end

    test "serializes multiple subjects" do
      [
        {~I<http://test-cases/0001>, NS.RDF.type, ~I<http://www.w3.org/2006/03/test-description#TestCase>},
        {~I<http://test-cases/0002>, NS.RDF.type, ~I<http://www.w3.org/2006/03/test-description#TestCase>}
      ]
      |> gets_serialized_to([
        %{"@id" => "http://test-cases/0001", "@type" => ["http://www.w3.org/2006/03/test-description#TestCase"]},
        %{"@id" => "http://test-cases/0002", "@type" => ["http://www.w3.org/2006/03/test-description#TestCase"]},
      ])
    end
  end

  describe "literal coercion" do
    test "typed literal" do
      {EX.a, EX.b, RDF.literal("foo", datatype: EX.d)}
      |> gets_serialized_to([%{
        "@id" => "http://example.com/a",
        "http://example.com/b" => [%{"@value" => "foo", "@type" => "http://example.com/d"}]
      }])
    end

    test "integer" do
      {EX.a, EX.b, RDF.literal(1)}
      |> gets_serialized_to([%{
        "@id" => "http://example.com/a",
        "http://example.com/b" => [%{"@value" => 1}]
      }], use_native_types: true)
    end

    test "integer (non-native)" do
      {EX.a, EX.b, RDF.literal(1)}
      |> gets_serialized_to([%{
        "@id" => "http://example.com/a",
        "http://example.com/b" => [%{"@value" => "1","@type" => "http://www.w3.org/2001/XMLSchema#integer"}]
      }], use_native_types: false)
    end

    test "boolean" do
      {EX.a, EX.b, RDF.literal(true)}
      |> gets_serialized_to([%{
        "@id" => "http://example.com/a",
        "http://example.com/b" => [%{"@value" => true}]
      }], use_native_types: true)
    end

    test "boolean (non-native)" do
      {EX.a, EX.b, RDF.literal(true)}
      |> gets_serialized_to([%{
        "@id" => "http://example.com/a",
        "http://example.com/b" => [%{"@value" => "true","@type" => "http://www.w3.org/2001/XMLSchema#boolean"}]
      }], use_native_types: false)
    end

    @tag skip: "TODO: Is this spec conformant or RDF.rb specific? RDF.rb doesn't use the specified RDF to Object Conversion algorithm but reuses a generalized expand_value algorithm"
    test "decimal" do
      {EX.a, EX.b, RDF.literal(1.0)}
      |> gets_serialized_to([%{
        "@id" => "http://example.com/a",
        "http://example.com/b" => [%{"@value" => "1.0", "@type" => "http://www.w3.org/2001/XMLSchema#decimal"}]
      }], use_native_types: true)
    end

    test "double" do
      {EX.a, EX.b, RDF.literal(1.0e0)}
      |> gets_serialized_to([%{
        "@id" => "http://example.com/a",
        "http://example.com/b" => [%{"@value" => 1.0E0}]
      }], use_native_types: true)
    end

    @tag skip: "TODO: Is this spec conformant or RDF.rb specific? RDF.rb doesn't use the specified RDF to Object Conversion algorithm but reuses a generalized expand_value algorithm"
    test "double (non-native)" do
      {EX.a, EX.b, RDF.literal(1.0e0)}
      |> gets_serialized_to([%{
        "@id" => "http://example.com/a",
        "http://example.com/b" => [%{"@value" => "1.0E0", "@type" => "http://www.w3.org/2001/XMLSchema#double"}]
      }], use_native_types: false)
    end
  end

  describe "datatyped (non-native) literals" do
    # One test per XSD datatype: without native types the lexical form and
    # datatype must be preserved.
    %{
      integer: 1,
      unsignedInt: 1,
      nonNegativeInteger: 1,
      float: 1,
      nonPositiveInteger: -1,
      negativeInteger: -1,
    }
    |> Enum.each(fn ({type, _} = data) ->
      @tag data: data
      test "#{type}", %{data: {type, value}} do
        {EX.a, EX.b, RDF.literal(value, datatype: apply(XSD, type, []))}
        |> gets_serialized_to([%{
          "@id" => "http://example.com/a",
          "http://example.com/b" => [%{"@value" => "#{value}", "@type" => "http://www.w3.org/2001/XMLSchema##{type}"}]
        }], use_native_types: false)
      end
    end)

    test "when useNativeTypes" do
      {EX.a, EX.b, RDF.literal("foo", datatype: EX.customType)}
      |> gets_serialized_to([%{
        "@id" => "http://example.com/a",
        "http://example.com/b" => [%{"@value" => "foo", "@type" => to_string(EX.customType)}]
      }], use_native_types: true)
    end
  end

  test "encodes language literal" do
    {EX.a, EX.b, RDF.literal("foo", language: "en-us")}
    |> gets_serialized_to([%{
      "@id" => "http://example.com/a",
      "http://example.com/b" => [%{"@value" => "foo", "@language" => "en-us"}]
    }])
  end

  describe "blank nodes" do
    test "should generate blank nodes" do
      {RDF.bnode(:a), EX.a, EX.b}
      |> gets_serialized_to([%{
        "@id" => "_:a",
        "http://example.com/a" => [%{"@id" => "http://example.com/b"}]
      }])
    end

    test "should generate blank nodes as object" do
      [
        {EX.a, EX.b, RDF.bnode(:a)},
        {RDF.bnode(:a), EX.c, EX.d}
      ]
      |> gets_serialized_to([
        %{
          "@id" => "_:a",
          "http://example.com/c" => [%{"@id" => "http://example.com/d"}]
        },
        %{
          "@id" => "http://example.com/a",
          "http://example.com/b" => [%{"@id" => "_:a"}]
        }
      ])
    end
  end

  describe "lists" do
    # Each entry maps a test title to {input statements, expected JSON-LD}.
    %{
      "literal list" => {
        [
          {EX.a, EX.b, RDF.bnode(:e1) },
          {RDF.bnode(:e1), NS.RDF.first, ~L"apple"},
          {RDF.bnode(:e1), NS.RDF.rest, RDF.bnode(:e2)},
          {RDF.bnode(:e2), NS.RDF.first, ~L"banana"},
          {RDF.bnode(:e2), NS.RDF.rest, NS.RDF.nil},
        ],
        [%{
          "@id" => "http://example.com/a",
          "http://example.com/b" => [%{
            "@list" => [
              %{"@value" => "apple"},
              %{"@value" => "banana"}
            ]
          }]
        }]
      },
      "iri list" => {
        [
          {EX.a, EX.b, RDF.bnode(:list)},
          {RDF.bnode(:list), NS.RDF.first, EX.c},
          {RDF.bnode(:list), NS.RDF.rest, NS.RDF.nil},
        ],
        [%{
          "@id" => "http://example.com/a",
          "http://example.com/b" => [%{
            "@list" => [
              %{"@id" => "http://example.com/c"}
            ]
          }]
        }]
      },
      "empty list" => {
        [
          {EX.a, EX.b, NS.RDF.nil},
        ],
        [%{
          "@id" => "http://example.com/a",
          "http://example.com/b" => [%{"@list" => []}]
        }]
      },
      "single element list" => {
        [
          {EX.a, EX.b, RDF.bnode(:list)},
          {RDF.bnode(:list), NS.RDF.first, ~L"apple"},
          {RDF.bnode(:list), NS.RDF.rest, NS.RDF.nil},
        ],
        [%{
          "@id" => "http://example.com/a",
          "http://example.com/b" => [%{"@list" => [%{"@value" => "apple"}]}]
        }]
      },
      "single element list without @type" => {
        [
          {EX.a, EX.b, RDF.bnode(:list)},
          {RDF.bnode(:list), NS.RDF.first, RDF.bnode(:a)},
          {RDF.bnode(:list), NS.RDF.rest, NS.RDF.nil},
          {RDF.bnode(:a), EX.b, ~L"foo"},
        ],
        [
          %{
            "@id" => "_:a",
            "http://example.com/b" => [%{"@value" => "foo"}]
          },
          %{
            "@id" => "http://example.com/a",
            "http://example.com/b" => [%{"@list" => [%{"@id" => "_:a"}]}]
          },
        ]
      },
      "multiple graphs with shared BNode" => {
        [
          {EX.z, EX.q, RDF.bnode(:z0), EX.G},
          {RDF.bnode(:z0), NS.RDF.first, ~L"cell-A", EX.G},
          {RDF.bnode(:z0), NS.RDF.rest, RDF.bnode(:z1), EX.G},
          {RDF.bnode(:z1), NS.RDF.first, ~L"cell-B", EX.G},
          {RDF.bnode(:z1), NS.RDF.rest, NS.RDF.nil, EX.G},
          {EX.x, EX.p, RDF.bnode(:z1), EX.G1},
        ],
        [%{
          "@id" => "http://www.example.com/G",
          "@graph" => [%{
            "@id" => "_:z0",
            "http://www.w3.org/1999/02/22-rdf-syntax-ns#first" => [%{"@value" => "cell-A"}],
            "http://www.w3.org/1999/02/22-rdf-syntax-ns#rest" => [%{"@id" => "_:z1"}]
          }, %{
            "@id" => "_:z1",
            "http://www.w3.org/1999/02/22-rdf-syntax-ns#first" => [%{"@value" => "cell-B"}],
            "http://www.w3.org/1999/02/22-rdf-syntax-ns#rest" => [%{"@list" => []}]
          }, %{
            "@id" => "http://www.example.com/z",
            "http://www.example.com/q" => [%{"@id" => "_:z0"}]
          }]
        },
        %{
          "@id" => "http://www.example.com/G1",
          "@graph" => [%{
            "@id" => "http://www.example.com/x",
            "http://www.example.com/p" => [%{"@id" => "_:z1"}]
          }]
        }]
      },
    }
    |> Enum.each(fn ({title, data}) ->
      if title == "multiple graphs with shared BNode" do
        @tag skip: "TODO: https://github.com/json-ld/json-ld.org/issues/357"
      end
      @tag data: data
      test title, %{data: {input, output}} do
        input |> gets_serialized_to(output)
      end
    end)
  end

  describe "quads" do
    # Quad statements: named graphs must be nested under "@graph".
    %{
      "simple named graph" => %{
        input: {EX.a, EX.b, EX.c, EX.U},
        output: [
          %{
            "@id" => "http://example.com/U",
            "@graph" => [%{
              "@id" => "http://example.com/a",
              "http://example.com/b" => [%{"@id" => "http://example.com/c"}]
            }]
          },
        ]
      },
      "with properties" => %{
        input: [
          {EX.a, EX.b, EX.c, EX.U},
          {EX.U, EX.d, EX.e},
        ],
        output: [
          %{
            "@id" => "http://example.com/U",
            "@graph" => [%{
              "@id" => "http://example.com/a",
              "http://example.com/b" => [%{"@id" => "http://example.com/c"}]
            }],
            "http://example.com/d" => [%{"@id" => "http://example.com/e"}]
          }
        ]
      },
      "with lists" => %{
        input: [
          {EX.a, EX.b, RDF.bnode(:a), EX.U},
          {RDF.bnode(:a), NS.RDF.first, EX.c, EX.U},
          {RDF.bnode(:a), NS.RDF.rest, NS.RDF.nil, EX.U},
          {EX.U, EX.d, RDF.bnode(:b)},
          {RDF.bnode(:b), NS.RDF.first, EX.e},
          {RDF.bnode(:b), NS.RDF.rest, NS.RDF.nil},
        ],
        output: [
          %{
            "@id" => "http://example.com/U",
            "@graph" => [%{
              "@id" => "http://example.com/a",
              "http://example.com/b" => [%{"@list" => [%{"@id" => "http://example.com/c"}]}]
            }],
            "http://example.com/d" => [%{"@list" => [%{"@id" => "http://example.com/e"}]}]
          }
        ]
      },
      "Two Graphs with same subject and lists" => %{
        input: [
          {EX.a, EX.b, RDF.bnode(:a), EX.U},
          {RDF.bnode(:a), NS.RDF.first, EX.c, EX.U},
          {RDF.bnode(:a), NS.RDF.rest, NS.RDF.nil, EX.U},
          {EX.a, EX.b, RDF.bnode(:b), EX.V},
          {RDF.bnode(:b), NS.RDF.first, EX.e, EX.V},
          {RDF.bnode(:b), NS.RDF.rest, NS.RDF.nil, EX.V},
        ],
        output: [
          %{
            "@id" => "http://example.com/U",
            "@graph" => [
              %{
                "@id" => "http://example.com/a",
                "http://example.com/b" => [%{
                  "@list" => [%{"@id" => "http://example.com/c"}]
                }]
              }
            ]
          },
          %{
            "@id" => "http://example.com/V",
            "@graph" => [
              %{
                "@id" => "http://example.com/a",
                "http://example.com/b" => [%{
                  "@list" => [%{"@id" => "http://example.com/e"}]
                }]
              }
            ]
          }
        ]
      },
    }
    |> Enum.each(fn ({title, data}) ->
      @tag data: data
      test title, %{data: %{input: input, output: output}} do
        input |> gets_serialized_to(output, only: [Dataset])
      end
    end)
  end

  describe "problems" do
    # Regression cases for previously problematic inputs.
    %{
      "xsd:boolean as value" => {
        {~I<http://data.wikia.com/terms#playable>, RDFS.range, XSD.boolean},
        [%{
          "@id" => "http://data.wikia.com/terms#playable",
          "http://www.w3.org/2000/01/rdf-schema#range" => [
            %{ "@id" => "http://www.w3.org/2001/XMLSchema#boolean" }
          ]
        }]
      },
    }
    |> Enum.each(fn ({title, data}) ->
      @tag data: data
      test title, %{data: {input, output}} do
        input |> gets_serialized_to(output)
      end
    end)
  end

end
|
|
@ -937,10 +937,11 @@ defmodule JSON.LD.ExpansionTest do
|
|||
"native boolean" => ["foo", true, %{"@value" => true}],
|
||||
"native integer" => ["foo", 1, %{"@value" => 1}],
|
||||
"native double" => ["foo", 1.1e1, %{"@value" => 1.1E1}],
|
||||
# TODO:
|
||||
# "native date" => ["foo", Date.parse("2011-12-27"), %{"@value" => "2011-12-27", "@type" => XSD.date |> to_string}],
|
||||
# "native time" => ["foo", Time.parse("10:11:12Z"), %{"@value" => "10:11:12Z", "@type" => XSD.time |> to_string}],
|
||||
# "native dateTime" =>["foo", DateTime.parse("2011-12-27T10:11:12Z"), {"@value" => "2011-12-27T10:11:12Z", "@type" => XSD.dateTime |> to_string}],
|
||||
# TODO: Do we really want to support the following? RDF.rb has another implementation and uses this function
|
||||
# for its implementation of fromRdf, instead of the RDF to Object Conversion algorithm in the spec ...
|
||||
# "native date" => ["foo", ~D[2011-12-27], %{"@value" => "2011-12-27", "@type" => XSD.date |> to_string}],
|
||||
# "native time" => ["foo", ~T[10:11:12Z], %{"@value" => "10:11:12Z", "@type" => XSD.time |> to_string}],
|
||||
# "native dateTime" =>["foo", DateTime.from_iso8601("2011-12-27T10:11:12Z") |> elem(1), %{"@value" => "2011-12-27T10:11:12Z", "@type" => XSD.dateTime |> to_string}],
|
||||
# "rdf boolean" => ["foo", RDF::Literal(true), %{"@value" => "true", "@type" => RDF::XSD.boolean.to_s}],
|
||||
# "rdf integer" => ["foo", RDF::Literal(1), %{"@value" => "1", "@type" => XSD.integer |> to_string],
|
||||
# "rdf decimal" => ["foo", RDF::Literal::Decimal.new(1.1), %{"@value" => "1.1", "@type" => XSD.decimal |> to_string}],
|
||||
|
@ -969,8 +970,8 @@ defmodule JSON.LD.ExpansionTest do
|
|||
}
|
||||
|> Enum.each(fn ({title, data}) ->
|
||||
# TODO
|
||||
# @tag skip: "Do these errors from the differing context setup?"
|
||||
@tag skip: "Why does this produce @language tags, although no term definition of foo exists? Is this also RDF.rb specific?"
|
||||
# @tag skip: "Do these errors originate from the differing context setup?"
|
||||
@tag skip: "Why does this produce @language tags in RDF.rb, although no term definition of foo exists? Is this also RDF.rb specific?"
|
||||
@tag data: data
|
||||
test "@language #{title}", %{data: [key, compacted, expanded], example_context: context} do
|
||||
assert expand_value(context, key, compacted) == expanded
|
||||
|
@ -999,140 +1000,4 @@ defmodule JSON.LD.ExpansionTest do
|
|||
end)
|
||||
end
|
||||
|
||||
# describe "#container" do
|
||||
# subject {
|
||||
# ctx = context.parse({
|
||||
# "ex" => "http://example.org/",
|
||||
# "list" => {"@id" => "ex:list", "@container" => "@list"},
|
||||
# "set" => {"@id" => "ex:set", "@container" => "@set"},
|
||||
# "ndx" => {"@id" => "ex:ndx", "@container" => "@index"},
|
||||
# })
|
||||
# logger.clear
|
||||
# ctx
|
||||
# }
|
||||
# it "uses TermDefinition" do
|
||||
# expect(subject.container(subject.term_definitions['ex'])).to be_nil
|
||||
# expect(subject.container(subject.term_definitions['list'])).to eq '@list'
|
||||
# expect(subject.container(subject.term_definitions['set'])).to eq '@set'
|
||||
# expect(subject.container(subject.term_definitions['ndx'])).to eq '@index'
|
||||
# end
|
||||
#
|
||||
# it "uses string" do
|
||||
# expect(subject.container('ex')).to be_nil
|
||||
# expect(subject.container('list')).to eq '@list'
|
||||
# expect(subject.container('set')).to eq '@set'
|
||||
# expect(subject.container('ndx')).to eq '@index'
|
||||
# end
|
||||
# end
|
||||
#
|
||||
# describe "#language" do
|
||||
# subject {
|
||||
# ctx = context.parse({
|
||||
# "ex" => "http://example.org/",
|
||||
# "nil" => {"@id" => "ex:nil", "@language" => nil},
|
||||
# "en" => {"@id" => "ex:en", "@language" => "en"},
|
||||
# })
|
||||
# logger.clear
|
||||
# ctx
|
||||
# }
|
||||
# it "uses TermDefinition" do
|
||||
# expect(subject.language(subject.term_definitions['ex'])).to be_falsey
|
||||
# expect(subject.language(subject.term_definitions['nil'])).to be_falsey
|
||||
# expect(subject.language(subject.term_definitions['en'])).to eq 'en'
|
||||
# end
|
||||
#
|
||||
# it "uses string" do
|
||||
# expect(subject.language('ex')).to be_falsey
|
||||
# expect(subject.language('nil')).to be_falsey
|
||||
# expect(subject.language('en')).to eq 'en'
|
||||
# end
|
||||
# end
|
||||
#
|
||||
# describe "#reverse?" do
|
||||
# subject {
|
||||
# ctx = context.parse({
|
||||
# "ex" => "http://example.org/",
|
||||
# "reverse" => {"@reverse" => "ex:reverse"},
|
||||
# })
|
||||
# logger.clear
|
||||
# ctx
|
||||
# }
|
||||
# it "uses TermDefinition" do
|
||||
# expect(subject.reverse?(subject.term_definitions['ex'])).to be_falsey
|
||||
# expect(subject.reverse?(subject.term_definitions['reverse'])).to be_truthy
|
||||
# end
|
||||
#
|
||||
# it "uses string" do
|
||||
# expect(subject.reverse?('ex')).to be_falsey
|
||||
# expect(subject.reverse?('reverse')).to be_truthy
|
||||
# end
|
||||
# end
|
||||
#
|
||||
# describe "#reverse_term" do
|
||||
# subject {
|
||||
# ctx = context.parse({
|
||||
# "ex" => "http://example.org/",
|
||||
# "reverse" => {"@reverse" => "ex"},
|
||||
# })
|
||||
# logger.clear
|
||||
# ctx
|
||||
# }
|
||||
# it "uses TermDefinition" do
|
||||
# expect(subject.reverse_term(subject.term_definitions['ex'])).to eql subject.term_definitions['reverse']
|
||||
# expect(subject.reverse_term(subject.term_definitions['reverse'])).to eql subject.term_definitions['ex']
|
||||
# end
|
||||
#
|
||||
# it "uses string" do
|
||||
# expect(subject.reverse_term('ex')).to eql subject.term_definitions['reverse']
|
||||
# expect(subject.reverse_term('reverse')).to eql subject.term_definitions['ex']
|
||||
# end
|
||||
# end
|
||||
#
|
||||
# describe JSON::LD::Context::TermDefinition do
|
||||
# context "with nothing" do
|
||||
# subject {described_class.new("term")}
|
||||
# its(:term) {is_expected.to eq "term"}
|
||||
# its(:id) {is_expected.to be_nil}
|
||||
# its(:to_rb) {is_expected.to eq %(TermDefinition.new("term"))}
|
||||
# end
|
||||
#
|
||||
# context "with id" do
|
||||
# subject {described_class.new("term", id: "http://example.org/term")}
|
||||
# its(:term) {is_expected.to eq "term"}
|
||||
# its(:id) {is_expected.to eq "http://example.org/term"}
|
||||
# its(:to_rb) {is_expected.to eq %(TermDefinition.new("term", id: "http://example.org/term"))}
|
||||
# end
|
||||
#
|
||||
# context "with type_mapping" do
|
||||
# subject {described_class.new("term", type_mapping: "http://example.org/type")}
|
||||
# its(:type_mapping) {is_expected.to eq "http://example.org/type"}
|
||||
# its(:to_rb) {is_expected.to eq %(TermDefinition.new("term", type_mapping: "http://example.org/type"))}
|
||||
# end
|
||||
#
|
||||
# context "with container_mapping" do
|
||||
# subject {described_class.new("term", container_mapping: "@set")}
|
||||
# its(:container_mapping) {is_expected.to eq "@set"}
|
||||
# its(:to_rb) {is_expected.to eq %(TermDefinition.new("term", container_mapping: "@set"))}
|
||||
# end
|
||||
#
|
||||
# context "with language_mapping" do
|
||||
# subject {described_class.new("term", language_mapping: "en")}
|
||||
# its(:language_mapping) {is_expected.to eq "en"}
|
||||
# its(:to_rb) {is_expected.to eq %(TermDefinition.new("term", language_mapping: "en"))}
|
||||
# end
|
||||
#
|
||||
# context "with reverse_property" do
|
||||
# subject {described_class.new("term", reverse_property: true)}
|
||||
# its(:reverse_property) {is_expected.to be_truthy}
|
||||
# its(:to_rb) {is_expected.to eq %(TermDefinition.new("term", reverse_property: true))}
|
||||
# end
|
||||
#
|
||||
# context "with simple" do
|
||||
# subject {described_class.new("term", simple: true)}
|
||||
# its(:simple) {is_expected.to be_truthy}
|
||||
# its(:to_rb) {is_expected.to eq %(TermDefinition.new("term", simple: true))}
|
||||
# end
|
||||
# end
|
||||
|
||||
|
||||
end
|
||||
|
|
Loading…
Reference in a new issue