Apply mix formatter

This commit is contained in:
Marcel Otto 2020-06-20 04:25:58 +02:00
parent 9314cc6757
commit b06586c387
38 changed files with 4084 additions and 3080 deletions

4
.formatter.exs Normal file
View File

@ -0,0 +1,4 @@
[
inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"],
import_deps: [:rdf]
]

View File

@ -4,32 +4,37 @@ defmodule JSON.LD.Compaction do
import JSON.LD.Utils
alias JSON.LD.Context
def compact(input, context, options \\ %JSON.LD.Options{}) do
with options = JSON.LD.Options.new(options),
active_context = JSON.LD.context(context, options),
inverse_context = Context.inverse(active_context),
expanded = JSON.LD.expand(input, options)
do
expanded = JSON.LD.expand(input, options) do
result =
case do_compact(expanded, active_context, inverse_context, nil, options.compact_arrays) do
[] ->
%{}
result when is_list(result) ->
# TODO: Spec fixme? We're setting vocab to true, as other implementations do it, but this is not mentioned in the spec
%{compact_iri("@graph", active_context, inverse_context, nil, true) => result}
result ->
result
end
if Context.empty?(active_context),
do: result,
else: Map.put(result, "@context", context["@context"] || context)
end
end
defp do_compact(element, active_context, inverse_context, active_property,
compact_arrays \\ true)
defp do_compact(
element,
active_context,
inverse_context,
active_property,
compact_arrays \\ true
)
# 1) If element is a scalar, it is already in its most compact form, so simply return element.
defp do_compact(element, _, _, _, _)
@ -39,14 +44,19 @@ defmodule JSON.LD.Compaction do
# 2) If element is an array
defp do_compact(element, active_context, inverse_context, active_property, compact_arrays)
when is_list(element) do
result = Enum.reduce(element, [], fn (item, result) ->
result =
Enum.reduce(element, [], fn item, result ->
case do_compact(item, active_context, inverse_context, active_property, compact_arrays) do
nil -> result
compacted_item -> [compacted_item | result]
end
end) |> Enum.reverse
end)
|> Enum.reverse()
if compact_arrays and length(result) == 1 and
is_nil((term_def = active_context.term_defs[active_property]) && term_def.container_mapping) do
is_nil(
(term_def = active_context.term_defs[active_property]) && term_def.container_mapping
) do
List.first(result)
else
result
@ -57,50 +67,81 @@ defmodule JSON.LD.Compaction do
defp do_compact(element, active_context, inverse_context, active_property, compact_arrays)
when is_map(element) do
# 4)
if (Map.has_key?(element, "@value") or Map.has_key?(element, "@id")) do
if Map.has_key?(element, "@value") or Map.has_key?(element, "@id") do
result = compact_value(element, active_context, inverse_context, active_property)
if scalar?(result) do
result
else
do_compact_non_scalar(element, active_context, inverse_context, active_property, compact_arrays)
do_compact_non_scalar(
element,
active_context,
inverse_context,
active_property,
compact_arrays
)
end
else
do_compact_non_scalar(element, active_context, inverse_context, active_property, compact_arrays)
do_compact_non_scalar(
element,
active_context,
inverse_context,
active_property,
compact_arrays
)
end
end
defp do_compact_non_scalar(element, active_context, inverse_context, active_property, compact_arrays) do
defp do_compact_non_scalar(
element,
active_context,
inverse_context,
active_property,
compact_arrays
) do
# 5)
inside_reverse = active_property == "@reverse"
# 6) + 7)
element
|> Enum.sort_by(fn {expanded_property , _} -> expanded_property end)
|> Enum.reduce(%{}, fn ({expanded_property, expanded_value}, result) ->
|> Enum.sort_by(fn {expanded_property, _} -> expanded_property end)
|> Enum.reduce(%{}, fn {expanded_property, expanded_value}, result ->
cond do
# 7.1)
expanded_property in ~w[@id @type] ->
# 7.1.1)
compacted_value =
if is_binary(expanded_value) do
compact_iri(expanded_value, active_context, inverse_context, nil,
expanded_property == "@type")
compact_iri(
expanded_value,
active_context,
inverse_context,
nil,
expanded_property == "@type"
)
# 7.1.2)
else
# 7.1.2.1)
# TODO: RDF.rb calls also Array#compact
if(is_list(expanded_value),
do: expanded_value,
else: [expanded_value])
else: [expanded_value]
)
# 7.1.2.2)
|> Enum.reduce([], fn (expanded_type, compacted_value) ->
|> Enum.reduce([], fn expanded_type, compacted_value ->
compacted_value ++
[compact_iri(expanded_type, active_context, inverse_context, nil, true)]
end)
# 7.1.2.3)
|> case(do: (
|> case(
do:
(
[compacted_value] -> compacted_value
compacted_value -> compacted_value))
compacted_value -> compacted_value
)
)
end
# 7.1.3)
alias = compact_iri(expanded_property, active_context, inverse_context, nil, true)
# 7.1.4)
@ -109,11 +150,13 @@ defmodule JSON.LD.Compaction do
# 7.2)
expanded_property == "@reverse" ->
# 7.2.1)
compacted_value = do_compact(expanded_value, active_context, inverse_context, "@reverse")
compacted_value =
do_compact(expanded_value, active_context, inverse_context, "@reverse")
# 7.2.2)
{compacted_value, result} =
Enum.reduce compacted_value, {%{}, result},
fn ({property, value}, {compacted_value, result}) ->
Enum.reduce(compacted_value, {%{}, result}, fn {property, value},
{compacted_value, result} ->
term_def = active_context.term_defs[property]
# 7.2.2.1)
if term_def && term_def.reverse_property do
@ -125,12 +168,14 @@ defmodule JSON.LD.Compaction do
else
value
end
# 7.2.2.1.2) + 7.2.2.1.3)
{compacted_value, merge_compacted_value(result, property, value)}
else
{Map.put(compacted_value, property, value), result}
end
end
end)
# 7.2.3)
unless Enum.empty?(compacted_value) do
# 7.2.3.1)
@ -160,8 +205,15 @@ defmodule JSON.LD.Compaction do
if expanded_value == [] do
# 7.5.1)
item_active_property =
compact_iri(expanded_property, active_context, inverse_context,
expanded_value, true, inside_reverse)
compact_iri(
expanded_property,
active_context,
inverse_context,
expanded_value,
true,
inside_reverse
)
# 7.5.2)
Map.update(result, item_active_property, [], fn
value when not is_list(value) -> [value]
@ -172,11 +224,17 @@ defmodule JSON.LD.Compaction do
end
# 7.6)
Enum.reduce(expanded_value, result, fn (expanded_item, result) ->
Enum.reduce(expanded_value, result, fn expanded_item, result ->
# 7.6.1)
item_active_property =
compact_iri(expanded_property, active_context, inverse_context,
expanded_item, true, inside_reverse)
compact_iri(
expanded_property,
active_context,
inverse_context,
expanded_item,
true,
inside_reverse
)
# 7.6.2)
term_def = active_context.term_defs[item_active_property]
@ -184,38 +242,50 @@ defmodule JSON.LD.Compaction do
# 7.6.3)
value = (is_map(expanded_item) && expanded_item["@list"]) || expanded_item
compacted_item =
do_compact(value, active_context, inverse_context,
item_active_property, compact_arrays)
do_compact(
value,
active_context,
inverse_context,
item_active_property,
compact_arrays
)
# 7.6.4)
compacted_item =
if list?(expanded_item) do
# 7.6.4.1)
compacted_item =
unless is_list(compacted_item),
do: [compacted_item], else: compacted_item
unless is_list(compacted_item), do: [compacted_item], else: compacted_item
# 7.6.4.2)
unless container == "@list" do
# 7.6.4.2.1)
compacted_item = %{
# TODO: Spec fixme? We're setting vocab to true, as other implementations do it, but this is not mentioned in the spec
compact_iri("@list", active_context, inverse_context, nil, true) =>
compacted_item}
compacted_item
}
# 7.6.4.2.2)
if Map.has_key?(expanded_item, "@index") do
Map.put(compacted_item,
Map.put(
compacted_item,
# TODO: Spec fixme? We're setting vocab to true, as other implementations do it, but this is not mentioned in the spec
compact_iri("@index", active_context, inverse_context, nil, true),
expanded_item["@index"])
expanded_item["@index"]
)
else
compacted_item
end
# 7.6.4.3)
else
if Map.has_key?(result, item_active_property) do
raise JSON.LD.CompactionToListOfListsError,
message: "The compacted document contains a list of lists as multiple lists have been compacted to the same term."
message:
"The compacted document contains a list of lists as multiple lists have been compacted to the same term."
else
compacted_item
end
@ -227,11 +297,13 @@ defmodule JSON.LD.Compaction do
# 7.6.5)
if container in ~w[@language @index] do
map_object = result[item_active_property] || %{}
compacted_item =
if container == "@language" and
is_map(compacted_item) and Map.has_key?(compacted_item, "@value"),
do: compacted_item["@value"],
else: compacted_item
map_key = expanded_item[container]
map_object = merge_compacted_value(map_object, map_key, compacted_item)
Map.put(result, item_active_property, map_object)
@ -239,10 +311,12 @@ defmodule JSON.LD.Compaction do
# 7.6.6)
else
compacted_item =
if !is_list(compacted_item) and (!compact_arrays or
if !is_list(compacted_item) and
(!compact_arrays or
container in ~w[@set @list] or expanded_property in ~w[@list @graph]),
do: [compacted_item],
else: compacted_item
merge_compacted_value(result, item_active_property, compacted_item)
end
end)
@ -251,42 +325,53 @@ defmodule JSON.LD.Compaction do
end
defp merge_compacted_value(map, key, value) do
Map.update map, key, value, fn
Map.update(map, key, value, fn
old_value when is_list(old_value) and is_list(value) ->
old_value ++ value
old_value when is_list(old_value) ->
old_value ++ [value]
old_value when is_list(value) ->
[old_value | value]
old_value ->
[old_value, value]
end)
end
end
@doc """
IRI Compaction
Details at <https://www.w3.org/TR/json-ld-api/#iri-compaction>
"""
def compact_iri(iri, active_context, inverse_context,
value \\ nil, vocab \\ false, reverse \\ false)
def compact_iri(
iri,
active_context,
inverse_context,
value \\ nil,
vocab \\ false,
reverse \\ false
)
# 1) If iri is null, return null.
def compact_iri(nil, _, _, _, _, _), do: nil
def compact_iri(iri, active_context, inverse_context, value, vocab, reverse) do
# 2) If vocab is true and iri is a key in inverse context:
term = if vocab && Map.has_key?(inverse_context, iri) do
term =
if vocab && Map.has_key?(inverse_context, iri) do
# 2.1) Initialize default language to active context's default language, if it has one, otherwise to @none.
# TODO: Spec fixme: This step is effectively useless; see Spec fixme on step 2.6.3
# default_language = active_context.default_language || "@none"
# 2.3) Initialize type/language to @language, and type/language value to @null. These two variables will keep track of the preferred type mapping or language mapping for a term, based on what is compatible with value.
type_language = "@language"
type_language_value = "@null"
# 2.2) Initialize containers to an empty array. This array will be used to keep track of an ordered list of preferred container mappings for a term, based on what is compatible with value.
# 2.4) If value is a JSON object that contains the key @index, then append the value @index to containers.
containers = if index?(value), do: ["@index"], else: []
{containers, type_language, type_language_value} =
cond do
# 2.5) If reverse is true, set type/language to @type, type/language value to @reverse, and append @set to containers.
@ -295,24 +380,27 @@ defmodule JSON.LD.Compaction do
type_language = "@type"
type_language_value = "@reverse"
{containers, type_language, type_language_value}
# 2.6) Otherwise, if value is a list object, then set type/language and type/language value to the most specific values that work for all items in the list as follows:
list?(value) ->
# 2.6.1) If @index is a not key in value, then append @list to containers.
containers =
if not index?(value),
do: containers ++ ["@list"], else: containers
containers = if not index?(value), do: containers ++ ["@list"], else: containers
# 2.6.2) Initialize list to the array associated with the key @list in value.
list = value["@list"]
# 2.6.3) Initialize common type and common language to null. If list is empty, set common language to default language.
# TODO: Spec fixme: Setting common language to default language is effectively useless, since the only place it is used is the follow loop in 2.6.4, which is immediately left when the list is empty
{common_type, common_language} = {nil, nil}
{type_language, type_language_value} =
if Enum.empty?(list) do
{type_language, type_language_value}
else
# 2.6.4) For each item in list:
{common_type, common_language} = Enum.reduce_while list, {common_type, common_language},
fn (item, {common_type, common_language}) ->
{common_type, common_language} =
Enum.reduce_while(list, {common_type, common_language}, fn item,
{common_type,
common_language} ->
# 2.6.4.1) Initialize item language to @none and item type to @none.
{item_type, item_language} = {"@none", "@none"}
# 2.6.4.2) If item contains the key @value:
@ -322,50 +410,62 @@ defmodule JSON.LD.Compaction do
# 2.6.4.2.1) If item contains the key @language, then set item language to its associated value.
Map.has_key?(item, "@language") ->
{item_type, item["@language"]}
# 2.6.4.2.2) Otherwise, if item contains the key @type, set item type to its associated value.
Map.has_key?(item, "@type") ->
{item["@type"], item_language}
# 2.6.4.2.3) Otherwise, set item language to @null.
true ->
{item_type, "@null"}
end
# 2.6.4.3) Otherwise, set item type to @id.
else
{"@id", item_language}
end
common_language =
cond do
# 2.6.4.4) If common language is null, set it to item language.
is_nil(common_language) ->
item_language
# 2.6.4.5) Otherwise, if item language does not equal common language and item contains the key @value, then set common language to @none because list items have conflicting languages.
item_language != common_language and Map.has_key?(item, "@value") ->
"@none"
true ->
common_language
end
common_type =
cond do
# 2.6.4.6) If common type is null, set it to item type.
is_nil(common_type) ->
item_type
# 2.6.4.7) Otherwise, if item type does not equal common type, then set common type to @none because list items have conflicting types.
item_type != common_type ->
"@none"
true ->
common_type
end
# 2.6.4.8) If common language is @none and common type is @none, then stop processing items in the list because it has been detected that there is no common language or type amongst the items.
if common_language == "@none" and common_type == "@none" do
{:halt, {common_type, common_language}}
else
{:cont, {common_type, common_language}}
end
end
end)
# 2.6.5) If common language is null, set it to @none.
common_language = if is_nil(common_language), do: "@none", else: common_language
# 2.6.6) If common type is null, set it to @none.
common_type = if is_nil(common_type), do: "@none", else: common_type
# 2.6.7) If common type is not @none then set type/language to @type and type/language value to common type.
if common_type != "@none" do
type_language = "@type"
@ -377,7 +477,9 @@ defmodule JSON.LD.Compaction do
{type_language, type_language_value}
end
end
{containers, type_language, type_language_value}
# 2.7) Otherwise
true ->
# 2.7.1) If value is a value object:
@ -398,20 +500,26 @@ defmodule JSON.LD.Compaction do
{containers, type_language, type_language_value}
end
end
# 2.7.2) Otherwise, set type/language to @type and set type/language value to @id.
else
type_language = "@type"
type_language_value = "@id"
{containers, type_language, type_language_value}
end
# 2.7.3) Append @set to containers.
containers = containers ++ ["@set"]
{containers, type_language, type_language_value}
end
# 2.8) Append @none to containers. This represents the non-existence of a container mapping, and it will be the last container mapping value to be checked as it is the most generic.
containers = containers ++ ["@none"]
# 2.9) If type/language value is null, set it to @null. This is the key under which null values are stored in the inverse context entry.
type_language_value = if is_nil(type_language_value), do: "@null", else: type_language_value
type_language_value =
if is_nil(type_language_value), do: "@null", else: type_language_value
# 2.10) Initialize preferred values to an empty array. This array will indicate, in order, the preferred values for a term's type mapping or language mapping.
preferred_values = []
# 2.11) If type/language value is @reverse, append @reverse to preferred values.
@ -419,38 +527,49 @@ defmodule JSON.LD.Compaction do
if type_language_value == "@reverse",
do: preferred_values ++ ["@reverse"],
else: preferred_values
# 2.12) If type/language value is @id or @reverse and value has an @id member:
preferred_values =
if type_language_value in ~w[@id @reverse] and is_map(value) and Map.has_key?(value, "@id") do
if type_language_value in ~w[@id @reverse] and is_map(value) and
Map.has_key?(value, "@id") do
# 2.12.1) If the result of using the IRI compaction algorithm, passing active context, inverse context, the value associated with the @id key in value for iri, true for vocab, and true for document relative has a term definition in the active context with an IRI mapping that equals the value associated with the @id key in value, then append @vocab, @id, and @none, in that order, to preferred values.
# TODO: Spec fixme? document_relative is not a specified parameter of compact_iri
compact_id = compact_iri(value["@id"], active_context, inverse_context, nil, true)
if (term_def = active_context.term_defs[compact_id]) && term_def.iri_mapping == value["@id"] do
if (term_def = active_context.term_defs[compact_id]) &&
term_def.iri_mapping == value["@id"] do
preferred_values ++ ~w[@vocab @id @none]
# 2.12.2) Otherwise, append @id, @vocab, and @none, in that order, to preferred values.
else
preferred_values ++ ~w[@id @vocab @none]
end
# 2.13) Otherwise, append type/language value and @none, in that order, to preferred values.
else
preferred_values ++ [type_language_value, "@none"]
end
# 2.14) Initialize term to the result of the Term Selection algorithm, passing inverse context, iri, containers, type/language, and preferred values.
select_term(inverse_context, iri, containers, type_language, preferred_values)
end
cond do
# 2.15) If term is not null, return term.
not is_nil(term) ->
term
# 3) At this point, there is no simple term that iri can be compacted to. If vocab is true and active context has a vocabulary mapping:
# 3.1) If iri begins with the vocabulary mapping's value but is longer, then initialize suffix to the substring of iri that does not match. If suffix does not have a term definition in active context, then return suffix.
vocab && active_context.vocab && String.starts_with?(iri, active_context.vocab) ->
suffix = String.replace_prefix(iri, active_context.vocab, "")
if suffix != "" && is_nil(active_context.term_defs[suffix]) do
String.replace_prefix(iri, active_context.vocab, "")
else
create_compact_iri(iri, active_context, value, vocab)
end
true ->
create_compact_iri(iri, active_context, value, vocab)
end
@ -458,20 +577,25 @@ defmodule JSON.LD.Compaction do
defp create_compact_iri(iri, active_context, value, vocab) do
# 4) The iri could not be compacted using the active context's vocabulary mapping. Try to create a compact IRI, starting by initializing compact IRI to null. This variable will be used to store the created compact IRI, if any.
compact_iri =
# 5) For each key term and value term definition in the active context:
Enum.reduce(active_context.term_defs, nil, fn ({term, term_def}, compact_iri) ->
compact_iri =
Enum.reduce(active_context.term_defs, nil, fn {term, term_def}, compact_iri ->
cond do
# 5.1) If the term contains a colon (:), then continue to the next term because terms with colons can't be used as prefixes.
String.contains?(term, ":") ->
compact_iri
# 5.2) If the term definition is null, its IRI mapping equals iri, or its IRI mapping is not a substring at the beginning of iri, the term cannot be used as a prefix because it is not a partial match with iri. Continue with the next term.
is_nil(term_def) || term_def.iri_mapping == iri ||
not String.starts_with?(iri, term_def.iri_mapping) ->
compact_iri
true ->
# 5.3) Initialize candidate by concatenating term, a colon (:), and the substring of iri that follows after the value of the term definition's IRI mapping.
candidate = term <> ":" <> (String.split_at(iri, String.length(term_def.iri_mapping)) |> elem(1))
candidate =
term <>
":" <> (String.split_at(iri, String.length(term_def.iri_mapping)) |> elem(1))
# 5.4) If either compact IRI is null or candidate is shorter or the same length but lexicographically less than compact IRI and candidate does not have a term definition in active context or if the term definition has an IRI mapping that equals iri and value is null, set compact IRI to candidate.
# TODO: Spec fixme: The specified expression is pretty ambiguous without brackets ...
# TODO: Spec fixme: "if the term definition has an IRI mapping that equals iri" is already caught in 5.2, so will never happen here ...
@ -484,13 +608,16 @@ defmodule JSON.LD.Compaction do
end
end
end)
cond do
# 6) If compact IRI is not null, return compact IRI.
not is_nil(compact_iri) ->
compact_iri
# 7) If vocab is false then transform iri to a relative IRI using the document's base IRI.
not vocab ->
remove_base(iri, Context.base(active_context))
# 8) Finally, return iri as is.
true ->
iri
@ -506,11 +633,14 @@ defmodule JSON.LD.Compaction do
defp remove_base(iri, base) do
base_len = String.length(base)
if String.starts_with?(iri, base) and String.at(iri, base_len) in ~w(? #) do
String.split_at(iri, base_len) |> elem(1)
else
case URI.parse(base) do
%URI{path: nil} -> iri
%URI{path: nil} ->
iri
base ->
do_remove_base(iri, %URI{base | path: parent_path(base.path)}, 0)
end
@ -519,6 +649,7 @@ defmodule JSON.LD.Compaction do
defp do_remove_base(iri, base, index) do
base_str = URI.to_string(base)
cond do
String.starts_with?(iri, base_str) ->
case String.duplicate("../", index) <>
@ -526,13 +657,17 @@ defmodule JSON.LD.Compaction do
"" -> "./"
rel -> rel
end
base.path == "/" -> iri
base.path == "/" ->
iri
true ->
do_remove_base(iri, %URI{base | path: parent_path(base.path)}, index + 1)
end
end
defp parent_path("/"), do: "/"
defp parent_path(path) do
case Path.dirname(String.trim_trailing(path, "/")) do
"/" -> "/"
@ -549,16 +684,21 @@ defmodule JSON.LD.Compaction do
term_def = active_context.term_defs[active_property]
# 1) Initialize number members to the number of members value contains.
number_members = Enum.count(value)
# 2) If value has an @index member and the container mapping associated to active property is set to @index, decrease number members by 1.
number_members =
if term_def != nil and Map.has_key?(value, "@index") and
term_def.container_mapping == "@index",
do: number_members - 1, else: number_members
do: number_members - 1,
else: number_members
# 3) If number members is greater than 2, return value as it cannot be compacted.
unless number_members > 2 do
{type_mapping, language_mapping} = if term_def,
{type_mapping, language_mapping} =
if term_def,
do: {term_def.type_mapping, term_def.language_mapping},
else: {nil, nil}
cond do
# 4) If value has an @id member
id = Map.get(value, "@id") ->
@ -566,28 +706,33 @@ defmodule JSON.LD.Compaction do
# 4.1) If number members is 1 and the type mapping of active property is set to @id, return the result of using the IRI compaction algorithm, passing active context, inverse context, and the value of the @id member for iri.
number_members == 1 and type_mapping == "@id" ->
compact_iri(id, active_context, inverse_context)
# 4.2) Otherwise, if number members is 1 and the type mapping of active property is set to @vocab, return the result of using the IRI compaction algorithm, passing active context, inverse context, the value of the @id member for iri, and true for vocab.
number_members == 1 and type_mapping == "@vocab" ->
compact_iri(id, active_context, inverse_context, nil, true)
# 4.3) Otherwise, return value as is.
true ->
value
end
# 5) Otherwise, if value has an @type member whose value matches the type mapping of active property, return the value associated with the @value member of value.
(type = Map.get(value, "@type")) && type == type_mapping ->
value["@value"]
# 6) Otherwise, if value has an @language member whose value matches the language mapping of active property, return the value associated with the @value member of value.
(language = Map.get(value, "@language")) &&
# TODO: Spec fixme: doesn't specify to check default language as well
(language = Map.get(value, "@language")) &&
language in [language_mapping, active_context.default_language] ->
value["@value"]
true ->
# 7) Otherwise, if number members equals 1 and either the value of the @value member is not a string, or the active context has no default language, or the language mapping of active property is set to null, return the value associated with the @value member.
value_value = value["@value"]
# TODO: Spec fixme: doesn't specify to check default language as well
if number_members == 1 and
(not is_binary(value_value) or
!active_context.default_language or
# TODO: Spec fixme: doesn't specify to check default language as well
Context.language(active_context, active_property) == nil) do
value_value
# 8) Otherwise, return value as is.
@ -607,12 +752,12 @@ defmodule JSON.LD.Compaction do
"""
def select_term(inverse_context, iri, containers, type_language, preferred_values) do
container_map = inverse_context[iri]
Enum.find_value containers, fn container ->
Enum.find_value(containers, fn container ->
if type_language_map = container_map[container] do
value_map = type_language_map[type_language]
Enum.find_value preferred_values, fn item -> value_map[item] end
Enum.find_value(preferred_values, fn item -> value_map[item] end)
end
end)
end
end
end

View File

@ -11,26 +11,24 @@ defmodule JSON.LD.Context do
alias JSON.LD.Context.TermDefinition
alias RDF.IRI
def base(%JSON.LD.Context{base_iri: false, api_base_iri: api_base_iri}),
do: api_base_iri
def base(%JSON.LD.Context{base_iri: base_iri}),
do: base_iri
def new(options \\ %JSON.LD.Options{}),
do: %JSON.LD.Context{api_base_iri: JSON.LD.Options.new(options).base}
def create(%{"@context" => json_ld_context}, options),
do: new(options) |> update(json_ld_context, [], options)
def update(active, local, remote \\ [], options \\ %JSON.LD.Options{})
def update(%JSON.LD.Context{} = active, local, remote, options) when is_list(local) do
Enum.reduce local, active, fn (local, result) ->
Enum.reduce(local, active, fn local, result ->
do_update(result, local, remote, options)
end
end)
end
# 2) If local context is not an array, set it to an array containing only local context.
@ -38,7 +36,6 @@ defmodule JSON.LD.Context do
update(active, [local], remote, options)
end
# 3.1) If context is null, set result to a newly-initialized active context and continue with the next context. The base IRI of the active context is set to the IRI of the currently being processed document (which might be different from the currently being processed context), if available; otherwise to null. If set, the base option of a JSON-LD API Implementation overrides the base IRI.
defp do_update(%JSON.LD.Context{}, nil, _remote, options) do
new(options)
@ -54,6 +51,7 @@ defmodule JSON.LD.Context do
raise JSON.LD.RecursiveContextInclusionError,
message: "Recursive context inclusion: #{local}"
end
remote = remote ++ [local]
# 3.2.3)
@ -61,25 +59,38 @@ defmodule JSON.LD.Context do
document =
case apply(document_loader, :load, [local, options]) do
{:ok, result} -> result.document
{:error, reason} -> raise JSON.LD.LoadingRemoteContextFailedError,
message: "Could not load remote context (#{local}): #{inspect reason}"
{:ok, result} ->
result.document
{:error, reason} ->
raise JSON.LD.LoadingRemoteContextFailedError,
message: "Could not load remote context (#{local}): #{inspect(reason)}"
end
document = cond do
is_map(document) -> document
is_binary(document) -> case Jason.decode(document) do
{:ok, result} -> result
{:error, reason} -> raise JSON.LD.InvalidRemoteContextError,
message: "Context is not a valid JSON document: #{inspect reason}"
end
true -> raise JSON.LD.InvalidRemoteContextError,
message: "Context is not a valid JSON object: #{inspect document}"
end
document =
cond do
is_map(document) ->
document
local = document["@context"] ||
is_binary(document) ->
case Jason.decode(document) do
{:ok, result} ->
result
{:error, reason} ->
raise JSON.LD.InvalidRemoteContextError,
message: "Invalid remote context: No @context key in #{inspect document}"
message: "Context is not a valid JSON document: #{inspect(reason)}"
end
true ->
raise JSON.LD.InvalidRemoteContextError,
message: "Context is not a valid JSON object: #{inspect(document)}"
end
local =
document["@context"] ||
raise JSON.LD.InvalidRemoteContextError,
message: "Invalid remote context: No @context key in #{inspect(document)}"
# 3.2.4) - 3.2.5)
do_update(active, local, remote, options)
@ -100,45 +111,56 @@ defmodule JSON.LD.Context do
# 3.3) If context is not a JSON object, an invalid local context error has been detected and processing is aborted.
defp do_update(_, local, _, _),
do: raise JSON.LD.InvalidLocalContextError,
message: "#{inspect local} is not a valid @context value"
do:
raise(JSON.LD.InvalidLocalContextError,
message: "#{inspect(local)} is not a valid @context value"
)
defp set_base(active, false, _),
do: active
defp set_base(active, _, remote) when is_list(remote) and length(remote) > 0,
do: active
defp set_base(active, base, _) do
cond do
# TODO: this slightly differs from the spec, due to our false special value for base_iri; add more tests
is_nil(base) or IRI.absolute?(base) ->
%JSON.LD.Context{active | base_iri: base}
active.base_iri ->
%JSON.LD.Context{active | base_iri: absolute_iri(base, active.base_iri)}
true ->
raise JSON.LD.InvalidBaseIRIError,
message: "#{inspect base} is a relative IRI, but no active base IRI defined"
message: "#{inspect(base)} is a relative IRI, but no active base IRI defined"
end
end
defp set_vocab(active, false), do: active
defp set_vocab(active, vocab) do
if is_nil(vocab) or IRI.absolute?(vocab) or blank_node_id?(vocab) do
%JSON.LD.Context{active | vocab: vocab}
else
raise JSON.LD.InvalidVocabMappingError,
message: "#{inspect vocab} is not a valid vocabulary mapping"
message: "#{inspect(vocab)} is not a valid vocabulary mapping"
end
end
defp set_language(active, false), do: active
defp set_language(active, nil),
do: %JSON.LD.Context{active | default_language: nil}
defp set_language(active, language) when is_binary(language),
do: %JSON.LD.Context{active | default_language: String.downcase(language)}
defp set_language(_, language),
do: raise JSON.LD.InvalidDefaultLanguageError,
message: "#{inspect language} is not a valid language"
do:
raise(JSON.LD.InvalidDefaultLanguageError,
message: "#{inspect(language)} is not a valid language"
)
def language(active, term) do
case Map.get(active.term_defs, term, %TermDefinition{}).language_mapping do
@ -149,9 +171,10 @@ defmodule JSON.LD.Context do
defp create_term_definitions(active, local, defined \\ %{}) do
{active, _} =
Enum.reduce local, {active, defined}, fn ({term, value}, {active, defined}) ->
Enum.reduce(local, {active, defined}, fn {term, value}, {active, defined} ->
create_term_definition(active, local, term, value, defined)
end
end)
active
end
@ -168,25 +191,41 @@ defmodule JSON.LD.Context do
def create_term_definition(active, local, term, value, defined) do
# 3)
if term in JSON.LD.keywords,
do: raise JSON.LD.KeywordRedefinitionError,
message: "#{inspect term} is a keyword and can not be defined in context"
if term in JSON.LD.keywords(),
do:
raise(JSON.LD.KeywordRedefinitionError,
message: "#{inspect(term)} is a keyword and can not be defined in context"
)
# 1)
case defined[term] do
true -> {active, defined}
false -> raise JSON.LD.CyclicIRIMappingError #, message: "#{inspect term} .."
nil -> do_create_term_definition(active, local, term, value,
Map.put(defined, term, false)) # 2)
true ->
{active, defined}
# , message: "#{inspect term} .."
false ->
raise JSON.LD.CyclicIRIMappingError
nil ->
do_create_term_definition(
active,
local,
term,
value,
# 2)
Map.put(defined, term, false)
)
end
end
defp do_create_term_definition(active, _local, term, nil, defined) do
{
# (if Map.has_key?(active.term_defs, term),
# do: put_in(active, [:term_defs, term], nil),
# else: raise "NotImplemented"),
# (if Map.has_key?(active.term_defs, term),
# do: put_in(active, [:term_defs, term], nil),
# else: raise "NotImplemented"),
%JSON.LD.Context{active | term_defs: Map.put(active.term_defs, term, nil)},
Map.put(defined, term, true)}
Map.put(defined, term, true)
}
end
defp do_create_term_definition(active, local, term, %{"@id" => nil}, defined),
@ -196,46 +235,55 @@ defmodule JSON.LD.Context do
do: do_create_term_definition(active, local, term, %{"@id" => value}, defined)
defp do_create_term_definition(active, local, term, %{} = value, defined) do
definition = %TermDefinition{} # 9)
# 9)
definition = %TermDefinition{}
{definition, active, defined} =
do_create_type_definition(definition, active, local, value, defined)
{done, definition, active, defined} =
do_create_reverse_definition(definition, active, local, value, defined)
{definition, active, defined} =
unless done do
{definition, active, defined} =
do_create_id_definition(definition, active, local, term, value, defined)
definition = do_create_container_definition(definition, value)
definition = do_create_language_definition(definition, value)
{definition, active, defined}
else
{definition, active, defined}
end
# 18 / 11.6) Set the term definition of term in active context to definition and set the value associated with defined's key term to true.
{%JSON.LD.Context{active | term_defs: Map.put(active.term_defs, term, definition)},
Map.put(defined, term, true)}
end
defp do_create_term_definition(_, _, _, value, _),
do: raise JSON.LD.InvalidTermDefinitionError,
message: "#{inspect value} is not a valid term definition"
do:
raise(JSON.LD.InvalidTermDefinitionError,
message: "#{inspect(value)} is not a valid term definition"
)
# 10.1)
# TODO: RDF.rb implementation says: "SPEC FIXME: @type may be nil"
defp do_create_type_definition(_, _, _, %{"@type" => type}, _) when not is_binary(type),
do: raise JSON.LD.InvalidTypeMappingError,
message: "#{inspect type} is not a valid type mapping"
do:
raise(JSON.LD.InvalidTypeMappingError,
message: "#{inspect(type)} is not a valid type mapping"
)
# 10.2) and 10.3)
defp do_create_type_definition(definition, active, local, %{"@type" => type}, defined) do
{expanded_type, active, defined} =
expand_iri(type, active, false, true, local, defined)
{expanded_type, active, defined} = expand_iri(type, active, false, true, local, defined)
if IRI.absolute?(expanded_type) or expanded_type in ~w[@id @vocab] do
{%TermDefinition{definition | type_mapping: expanded_type}, active, defined}
else
raise JSON.LD.InvalidTypeMappingError,
message: "#{inspect type} is not a valid type mapping"
message: "#{inspect(type)} is not a valid type mapping"
end
end
@ -243,35 +291,52 @@ defmodule JSON.LD.Context do
do: {definition, active, defined}
# 11) If value contains the key @reverse
defp do_create_reverse_definition(definition, active, local,
%{"@reverse" => reverse} = value, defined) do
defp do_create_reverse_definition(
definition,
active,
local,
%{"@reverse" => reverse} = value,
defined
) do
cond do
Map.has_key?(value, "@id") -> # 11.1)
# 11.1)
Map.has_key?(value, "@id") ->
raise JSON.LD.InvalidReversePropertyError,
message: "#{inspect reverse} is not a valid reverse property"
not is_binary(reverse) -> # 11.2)
message: "#{inspect(reverse)} is not a valid reverse property"
# 11.2)
not is_binary(reverse) ->
raise JSON.LD.InvalidIRIMappingError,
message: "Expected String for @reverse value. got #{inspect reverse}"
true -> # 11.3)
message: "Expected String for @reverse value. got #{inspect(reverse)}"
# 11.3)
true ->
{expanded_reverse, active, defined} =
expand_iri(reverse, active, false, true, local, defined)
definition =
if IRI.absolute?(expanded_reverse) or blank_node_id?(expanded_reverse) do
%TermDefinition{definition | iri_mapping: expanded_reverse}
else
raise JSON.LD.InvalidIRIMappingError,
message: "Non-absolute @reverse IRI: #{inspect reverse}"
message: "Non-absolute @reverse IRI: #{inspect(reverse)}"
end
# 11.4)
definition =
case Map.get(value, "@container", {false}) do # 11.4)
case Map.get(value, "@container", {false}) do
{false} ->
definition
container when is_nil(container) or container in ~w[@set @index] ->
%TermDefinition{definition | container_mapping: container}
_ ->
raise JSON.LD.InvalidReversePropertyError,
message: "#{inspect reverse} is not a valid reverse property; reverse properties only support set- and index-containers"
message:
"#{inspect(reverse)} is not a valid reverse property; reverse properties only support set- and index-containers"
end
# 11.5) & 11.6)
{true, %TermDefinition{definition | reverse_property: true}, active, defined}
end
@ -280,29 +345,32 @@ defmodule JSON.LD.Context do
defp do_create_reverse_definition(definition, active, _, _, defined),
do: {false, definition, active, defined}
# 13)
defp do_create_id_definition(definition, active, local, term,
%{"@id" => id}, defined) when id != term do
defp do_create_id_definition(definition, active, local, term, %{"@id" => id}, defined)
when id != term do
# 13.1)
if is_binary(id) do
# 13.2)
{expanded_id, active, defined} =
expand_iri(id, active, false, true, local, defined)
{expanded_id, active, defined} = expand_iri(id, active, false, true, local, defined)
cond do
expanded_id == "@context" ->
raise JSON.LD.InvalidKeywordAliasError,
message: "cannot alias @context"
JSON.LD.keyword?(expanded_id) or
IRI.absolute?(expanded_id) or
blank_node_id?(expanded_id) ->
{%TermDefinition{definition | iri_mapping: expanded_id}, active, defined}
true ->
raise JSON.LD.InvalidIRIMappingError,
message: "#{inspect id} is not a valid IRI mapping; resulting IRI mapping should be a keyword, absolute IRI or blank node"
message:
"#{inspect(id)} is not a valid IRI mapping; resulting IRI mapping should be a keyword, absolute IRI or blank node"
end
else # 13.1)
else
raise JSON.LD.InvalidIRIMappingError,
message: "expected value of @id to be a string, but got #{inspect id}"
message: "expected value of @id to be a string, but got #{inspect(id)}"
end
end
@ -314,59 +382,72 @@ defmodule JSON.LD.Context do
case compact_iri_parts(term) do
[prefix, suffix] ->
prefix_mapping = local[prefix]
{active, defined} =
if prefix_mapping do
do_create_term_definition(active, local, prefix, prefix_mapping, defined)
else
{active, defined}
end
if prefix_def = active.term_defs[prefix] do
{%TermDefinition{definition | iri_mapping: prefix_def.iri_mapping <> suffix}, active, defined}
{%TermDefinition{definition | iri_mapping: prefix_def.iri_mapping <> suffix}, active,
defined}
else
{%TermDefinition{definition | iri_mapping: term}, active, defined}
end
nil -> {%TermDefinition{definition | iri_mapping: term}, active, defined}
nil ->
{%TermDefinition{definition | iri_mapping: term}, active, defined}
end
# 15)
else
if active.vocab do
{%TermDefinition{definition | iri_mapping: active.vocab <> term}, active, defined}
else
raise JSON.LD.InvalidIRIMappingError,
message: "#{inspect term} is not a valid IRI mapping; relative term definition without vocab mapping"
message:
"#{inspect(term)} is not a valid IRI mapping; relative term definition without vocab mapping"
end
end
end
# 16.1)
defp do_create_container_definition(_, %{"@container" => container})
when container not in ~w[@list @set @index @language],
do: raise JSON.LD.InvalidContainerMappingError,
message: "#{inspect container} is not a valid container mapping; @container must be either @list, @set, @index, or @language"
do:
raise(JSON.LD.InvalidContainerMappingError,
message:
"#{inspect(container)} is not a valid container mapping; @container must be either @list, @set, @index, or @language"
)
# 16.2)
defp do_create_container_definition(definition, %{"@container" => container}),
do: %TermDefinition{definition | container_mapping: container}
defp do_create_container_definition(definition, _),
do: definition
# 17)
defp do_create_language_definition(definition, %{"@language" => language} = value) do
unless Map.has_key?(value, "@type") do
case language do
language when is_binary(language) ->
%TermDefinition{definition | language_mapping: String.downcase(language)}
language when is_nil(language) ->
%TermDefinition{definition | language_mapping: nil}
_ ->
raise JSON.LD.InvalidLanguageMappingError,
message: "#{inspect language} is not a valid language mapping; @language must be a string or null"
message:
"#{inspect(language)} is not a valid language mapping; @language must be a string or null"
end
end
end
defp do_create_language_definition(definition, _), do: definition
defp do_create_language_definition(definition, _), do: definition
@doc """
Inverse Context Creation algorithm
@ -376,10 +457,11 @@ defmodule JSON.LD.Context do
def inverse(%JSON.LD.Context{} = context) do
# 2) Initialize default language to @none. If the active context has a default language, set default language to it.
default_language = context.default_language || "@none"
# 3) For each key term and value term definition in the active context, ordered by shortest term first (breaking ties by choosing the lexicographically least term)
context.term_defs
|> Enum.sort_by(fn {term, _} -> String.length(term) end)
|> Enum.reduce(%{}, fn ({term, term_def}, result) ->
|> Enum.reduce(%{}, fn {term, term_def}, result ->
# 3.1) If the term definition is null, term cannot be selected during compaction, so continue to the next term.
if term_def do
# 3.2) Initialize container to @none. If there is a container mapping in term definition, set container to its associated value.
@ -395,15 +477,18 @@ defmodule JSON.LD.Context do
# 3.8) If the term definition indicates that the term represents a reverse property
%TermDefinition{reverse_property: true} ->
{Map.put_new(type_map, "@reverse", term), language_map}
# 3.9) Otherwise, if term definition has a type mapping
%TermDefinition{type_mapping: type_mapping}
when type_mapping != false ->
{Map.put_new(type_map, type_mapping, term), language_map}
# 3.10) Otherwise, if term definition has a language mapping (might be null)
%TermDefinition{language_mapping: language_mapping}
when language_mapping != false ->
language = language_mapping || "@null"
{type_map, Map.put_new(language_map, language, term)}
# 3.11) Otherwise
_ ->
language_map = Map.put_new(language_map, default_language, term)
@ -415,10 +500,10 @@ defmodule JSON.LD.Context do
result
|> Map.put_new(iri, %{})
|> Map.update(iri, %{}, fn container_map ->
Map.put container_map, container, %{
Map.put(container_map, container, %{
"@type" => type_map,
"@language" => language_map,
}
"@language" => language_map
})
end)
else
result
@ -426,10 +511,15 @@ defmodule JSON.LD.Context do
end)
end
def empty?(%JSON.LD.Context{term_defs: term_defs, vocab: nil, base_iri: false, default_language: nil})
def empty?(%JSON.LD.Context{
term_defs: term_defs,
vocab: nil,
base_iri: false,
default_language: nil
})
when map_size(term_defs) == 0,
do: true
def empty?(_),
do: false
end

View File

@ -1,7 +1,7 @@
defmodule JSON.LD.Context.TermDefinition do
defstruct iri_mapping: nil,
reverse_property: false,
type_mapping: false, language_mapping: false,
type_mapping: false,
language_mapping: false,
container_mapping: nil
end

View File

@ -8,7 +8,6 @@ defmodule JSON.LD.Decoder do
alias JSON.LD.NodeIdentifierMap
alias RDF.{Dataset, Graph, NS}
@impl RDF.Serialization.Decoder
def decode(content, opts \\ []) do
with {:ok, json_ld_object} <- parse_json(content),
@ -19,62 +18,79 @@ defmodule JSON.LD.Decoder do
def to_rdf(element, options \\ %JSON.LD.Options{}) do
with options = JSON.LD.Options.new(options) do
{:ok, node_id_map} = NodeIdentifierMap.start_link
{:ok, node_id_map} = NodeIdentifierMap.start_link()
try do
element
|> JSON.LD.expand(options)
|> JSON.LD.node_map(node_id_map)
|> Enum.sort_by(fn {graph_name, _} -> graph_name end)
|> Enum.reduce(Dataset.new, fn ({graph_name, graph}, dataset) ->
|> Enum.reduce(Dataset.new(), fn {graph_name, graph}, dataset ->
unless relative_iri?(graph_name) do
rdf_graph =
graph
|> Enum.sort_by(fn {subject, _} -> subject end)
|> Enum.reduce(Graph.new, fn ({subject, node}, rdf_graph) ->
|> Enum.reduce(Graph.new(), fn {subject, node}, rdf_graph ->
unless relative_iri?(subject) do
node
|> Enum.sort_by(fn {property, _} -> property end)
|> Enum.reduce(rdf_graph, fn ({property, values}, rdf_graph) ->
|> Enum.reduce(rdf_graph, fn {property, values}, rdf_graph ->
cond do
property == "@type" ->
Graph.add rdf_graph,
node_to_rdf(subject), RDF.NS.RDF.type,
Graph.add(
rdf_graph,
node_to_rdf(subject),
RDF.NS.RDF.type(),
Enum.map(values, &node_to_rdf/1)
)
JSON.LD.keyword?(property) ->
rdf_graph
not options.produce_generalized_rdf and
blank_node_id?(property) ->
rdf_graph
relative_iri?(property) ->
rdf_graph
true ->
Enum.reduce values, rdf_graph, fn
(%{"@list" => list}, rdf_graph) ->
Enum.reduce(values, rdf_graph, fn
%{"@list" => list}, rdf_graph ->
with {list_triples, first} <-
list_to_rdf(list, node_id_map) do
rdf_graph
|> Graph.add({node_to_rdf(subject), node_to_rdf(property), first})
|> Graph.add(list_triples)
end
(item, rdf_graph) ->
item, rdf_graph ->
case object_to_rdf(item) do
nil -> rdf_graph
nil ->
rdf_graph
object ->
Graph.add rdf_graph,
Graph.add(
rdf_graph,
{node_to_rdf(subject), node_to_rdf(property), object}
)
end
end
end)
end
end)
else
rdf_graph
end
end)
if Enum.empty?(rdf_graph) do
dataset
else
Dataset.add(dataset, rdf_graph,
if(graph_name == "@default", do: nil, else: graph_name))
Dataset.add(
dataset,
rdf_graph,
if(graph_name == "@default", do: nil, else: graph_name)
)
end
else
dataset
@ -95,11 +111,12 @@ defmodule JSON.LD.Decoder do
end
def node_to_rdf(nil), do: nil
def node_to_rdf(node) do
if blank_node_id?(node) do
node
|> String.trim_leading("_:")
|> RDF.bnode
|> RDF.bnode()
else
RDF.uri(node)
end
@ -113,28 +130,49 @@ defmodule JSON.LD.Decoder do
defp object_to_rdf(%{"@value" => value} = item) do
datatype = item["@type"]
{value, datatype} =
cond do
is_boolean(value) ->
value = value |> RDF.XSD.Boolean.new() |> RDF.XSD.Boolean.canonical() |> RDF.XSD.Boolean.lexical()
datatype = if is_nil(datatype), do: NS.XSD.boolean, else: datatype
value =
value
|> RDF.XSD.Boolean.new()
|> RDF.XSD.Boolean.canonical()
|> RDF.XSD.Boolean.lexical()
datatype = if is_nil(datatype), do: NS.XSD.boolean(), else: datatype
{value, datatype}
is_float(value) or (is_number(value) and datatype == to_string(NS.XSD.double)) ->
value = value |> RDF.XSD.Double.new() |> RDF.XSD.Double.canonical() |> RDF.XSD.Double.lexical()
datatype = if is_nil(datatype), do: NS.XSD.double, else: datatype
is_float(value) or (is_number(value) and datatype == to_string(NS.XSD.double())) ->
value =
value
|> RDF.XSD.Double.new()
|> RDF.XSD.Double.canonical()
|> RDF.XSD.Double.lexical()
datatype = if is_nil(datatype), do: NS.XSD.double(), else: datatype
{value, datatype}
is_integer(value) or (is_number(value) and datatype == to_string(NS.XSD.integer)) ->
value = value |> RDF.XSD.Integer.new() |> RDF.XSD.Integer.canonical() |> RDF.XSD.Integer.lexical()
datatype = if is_nil(datatype), do: NS.XSD.integer, else: datatype
is_integer(value) or (is_number(value) and datatype == to_string(NS.XSD.integer())) ->
value =
value
|> RDF.XSD.Integer.new()
|> RDF.XSD.Integer.canonical()
|> RDF.XSD.Integer.lexical()
datatype = if is_nil(datatype), do: NS.XSD.integer(), else: datatype
{value, datatype}
is_nil(datatype) ->
datatype =
if Map.has_key?(item, "@language") do
RDF.langString
RDF.langString()
else
NS.XSD.string
NS.XSD.string()
end
{value, datatype}
true ->
{value, datatype}
end
@ -149,22 +187,23 @@ defmodule JSON.LD.Decoder do
defp list_to_rdf(list, node_id_map) do
{list_triples, first, last} =
list
|> Enum.reduce({[], nil, nil}, fn (item, {list_triples, first, last}) ->
|> Enum.reduce({[], nil, nil}, fn item, {list_triples, first, last} ->
case object_to_rdf(item) do
nil -> {list_triples, first, last}
nil ->
{list_triples, first, last}
object ->
with bnode = node_to_rdf(generate_blank_node_id(node_id_map)) do
if last do
{
list_triples ++
[{last, RDF.NS.RDF.rest, bnode},
{bnode, RDF.NS.RDF.first, object}],
[{last, RDF.NS.RDF.rest(), bnode}, {bnode, RDF.NS.RDF.first(), object}],
first,
bnode
}
else
{
list_triples ++ [{bnode, RDF.NS.RDF.first, object}],
list_triples ++ [{bnode, RDF.NS.RDF.first(), object}],
bnode,
bnode
}
@ -172,31 +211,31 @@ defmodule JSON.LD.Decoder do
end
end
end)
if last do
{list_triples ++ [{last, RDF.NS.RDF.rest, RDF.NS.RDF.nil}], first}
{list_triples ++ [{last, RDF.NS.RDF.rest(), RDF.NS.RDF.nil()}], first}
else
{[], RDF.NS.RDF.nil}
{[], RDF.NS.RDF.nil()}
end
end
# This is a much nicer and faster version, but the blank node numbering is reversed.
# Although this isn't relevant, I prefer to be more spec conform (for now).
# defp list_to_rdf(list, node_id_map) do
# list
# |> Enum.reverse
# |> Enum.reduce({[], RDF.NS.RDF.nil}, fn (item, {list_triples, last}) ->
# case object_to_rdf(item) do
# nil -> {list_triples, last}
# object ->
# with bnode = node_to_rdf(generate_blank_node_id(node_id_map)) do
# {
# [{bnode, RDF.NS.RDF.first, object},
# {bnode, RDF.NS.RDF.rest, last } | list_triples],
# bnode
# }
# end
# end
# end)
# end
# This is a much nicer and faster version, but the blank node numbering is reversed.
# Although this isn't relevant, I prefer to be more spec conform (for now).
# defp list_to_rdf(list, node_id_map) do
# list
# |> Enum.reverse
# |> Enum.reduce({[], RDF.NS.RDF.nil}, fn (item, {list_triples, last}) ->
# case object_to_rdf(item) do
# nil -> {list_triples, last}
# object ->
# with bnode = node_to_rdf(generate_blank_node_id(node_id_map)) do
# {
# [{bnode, RDF.NS.RDF.first, object},
# {bnode, RDF.NS.RDF.rest, last } | list_triples],
# bnode
# }
# end
# end
# end)
# end
end

View File

@ -7,5 +7,5 @@ defmodule JSON.LD.DocumentLoader do
alias JSON.LD.DocumentLoader.RemoteDocument
@callback load(String.t, JSON.LD.Options.t) :: {:ok, RemoteDocument.t} | {:error, any}
@callback load(String.t(), JSON.LD.Options.t()) :: {:ok, RemoteDocument.t()} | {:error, any}
end

View File

@ -13,6 +13,6 @@ defmodule JSON.LD.DocumentLoader.Default do
defp http_get(url) do
HTTPoison.get(url, [accept: "application/ld+json"], follow_redirect: true)
rescue
e -> {:error, "HTTPoison failed: #{inspect e}"}
e -> {:error, "HTTPoison failed: #{inspect(e)}"}
end
end

View File

@ -1,7 +1,7 @@
defmodule JSON.LD.DocumentLoader.RemoteDocument do
@type t :: %__MODULE__{
context_url: String.t,
document_url: String.t,
context_url: String.t(),
document_url: String.t(),
document: any
}

View File

@ -6,10 +6,10 @@ defmodule JSON.LD.Encoder do
alias RDF.{IRI, BlankNode, Literal, XSD, NS}
@rdf_type to_string(RDF.NS.RDF.type)
@rdf_nil to_string(RDF.NS.RDF.nil)
@rdf_first to_string(RDF.NS.RDF.first)
@rdf_rest to_string(RDF.NS.RDF.rest)
@rdf_type to_string(RDF.NS.RDF.type())
@rdf_nil to_string(RDF.NS.RDF.nil())
@rdf_first to_string(RDF.NS.RDF.first())
@rdf_rest to_string(RDF.NS.RDF.rest())
@rdf_list to_string(RDF.uri(RDF.NS.RDF.List))
@impl RDF.Serialization.Encoder
@ -38,66 +38,76 @@ defmodule JSON.LD.Encoder do
def from_rdf!(%RDF.Dataset{} = dataset, options) do
with options = JSON.LD.Options.new(options) do
graph_map =
Enum.reduce RDF.Dataset.graphs(dataset), %{},
fn graph, graph_map ->
Enum.reduce(RDF.Dataset.graphs(dataset), %{}, fn graph, graph_map ->
# 3.1)
name = to_string(graph.name || "@default")
# 3.3)
graph_map =
if graph.name && !get_in(graph_map, ["@default", name]) do
Map.update graph_map, "@default", %{name => %{"@id" => name}},
fn default_graph ->
Map.update(graph_map, "@default", %{name => %{"@id" => name}}, fn default_graph ->
Map.put(default_graph, name, %{"@id" => name})
end
end)
else
graph_map
end
# 3.2 + 3.4)
Map.put(graph_map, name,
node_map_from_graph(graph, Map.get(graph_map, name, %{}),
options.use_native_types, options.use_rdf_type))
end
Map.put(
graph_map,
name,
node_map_from_graph(
graph,
Map.get(graph_map, name, %{}),
options.use_native_types,
options.use_rdf_type
)
)
end)
# 4)
graph_map =
Enum.reduce graph_map, %{}, fn ({name, graph_object}, graph_map) ->
Enum.reduce(graph_map, %{}, fn {name, graph_object}, graph_map ->
Map.put(graph_map, name, convert_list(graph_object))
end
end)
# 5+6)
Map.get(graph_map, "@default", %{})
|> Enum.sort_by(fn {subject, _} -> subject end)
|> Enum.reduce([], fn ({subject, node}, result) ->
|> Enum.reduce([], fn {subject, node}, result ->
# 6.1)
node =
if Map.has_key?(graph_map, subject) do
Map.put node, "@graph",
Map.put(
node,
"@graph",
graph_map[subject]
|> Enum.sort_by(fn {s, _} -> s end)
|> Enum.reduce([], fn ({_s, n}, graph_nodes) ->
|> Enum.reduce([], fn {_s, n}, graph_nodes ->
n = Map.delete(n, "usages")
if map_size(n) == 1 and Map.has_key?(n, "@id") do
graph_nodes
else
[n | graph_nodes]
end
end)
|> Enum.reverse
|> Enum.reverse()
)
else
node
end
# 6.2)
node = Map.delete(node, "usages")
if map_size(node) == 1 and Map.has_key?(node, "@id") do
result
else
[node | result]
end
end)
|> Enum.reverse
|> Enum.reverse()
end
end
@ -106,30 +116,34 @@ defmodule JSON.LD.Encoder do
# 3.5)
defp node_map_from_graph(graph, current, use_native_types, use_rdf_type) do
Enum.reduce(graph, current, fn ({subject, predicate, object}, node_map) ->
{subject, predicate, node_object} =
{to_string(subject), to_string(predicate), nil}
Enum.reduce(graph, current, fn {subject, predicate, object}, node_map ->
{subject, predicate, node_object} = {to_string(subject), to_string(predicate), nil}
node = Map.get(node_map, subject, %{"@id" => subject})
{node_object, node_map} =
if is_node_object = (match?(%IRI{}, object) || match?(%BlankNode{}, object)) do
if is_node_object = match?(%IRI{}, object) || match?(%BlankNode{}, object) do
node_object = to_string(object)
node_map = Map.put_new(node_map, node_object, %{"@id" => node_object})
{node_object, node_map}
else
{node_object, node_map}
end
{node, node_map} =
if is_node_object and !use_rdf_type and predicate == @rdf_type do
node = Map.update(node, "@type", [node_object], fn types ->
node =
Map.update(node, "@type", [node_object], fn types ->
if node_object in types do
types
else
types ++ [node_object]
end
end)
{node, node_map}
else
value = rdf_to_object(object, use_native_types)
node =
Map.update(node, predicate, [value], fn objects ->
if value in objects do
@ -138,13 +152,15 @@ defmodule JSON.LD.Encoder do
objects ++ [value]
end
end)
node_map =
if is_node_object do
usage = %{
"node" => node,
"property" => predicate,
"value" => value,
"value" => value
}
Map.update(node_map, node_object, %{"usages" => [usage]}, fn object_node ->
Map.update(object_node, "usages", [usage], fn usages ->
usages ++ [usage]
@ -153,8 +169,10 @@ defmodule JSON.LD.Encoder do
else
node_map
end
{node, node_map}
end
Map.put(node_map, subject, node)
end)
|> update_node_usages
@ -163,38 +181,41 @@ defmodule JSON.LD.Encoder do
# This function is necessary because we have no references and must update the
# node member of the usage maps with later enhanced usages
defp update_node_usages(node_map) do
Enum.reduce node_map, node_map, fn
({subject, %{"usages" => _usages} = _node}, node_map) ->
update_in node_map, [subject, "usages"], fn usages ->
Enum.map usages, fn usage ->
Map.update! usage, "node", fn %{"@id" => subject} ->
Enum.reduce(node_map, node_map, fn
{subject, %{"usages" => _usages} = _node}, node_map ->
update_in(node_map, [subject, "usages"], fn usages ->
Enum.map(usages, fn usage ->
Map.update!(usage, "node", fn %{"@id" => subject} ->
node_map[subject]
end
end
end
(_, node_map) -> node_map
end
end)
end)
end)
_, node_map ->
node_map
end)
end
# This function is necessary because we have no references and use this
# instead to update the head by path
defp update_head(graph_object, path, old, new) do
update_in graph_object, path, fn objects ->
Enum.map objects, fn
update_in(graph_object, path, fn objects ->
Enum.map(objects, fn
^old -> new
current -> current
end
end
end)
end)
end
# 4)
defp convert_list(%{@rdf_nil => nil_node} = graph_object) do
Enum.reduce nil_node["usages"], graph_object,
Enum.reduce(
nil_node["usages"],
graph_object,
# 4.3.1)
fn (usage, graph_object) ->
fn usage, graph_object ->
# 4.3.2) + 4.3.3)
{list, list_nodes, [subject, property] = head_path, head} =
extract_list(usage)
{list, list_nodes, [subject, property] = head_path, head} = extract_list(usage)
# 4.3.4)
{skip, list, list_nodes, head_path, head} =
@ -214,62 +235,85 @@ defmodule JSON.LD.Encoder do
else
{false, list, list_nodes, head_path, head}
end
if skip do
graph_object
else
graph_object =
update_head graph_object, head_path, head,
update_head(
graph_object,
head_path,
head,
head
# 4.3.5)
|> Map.delete("@id")
# 4.3.6) isn't necessary, since we built the list in reverse order
# 4.3.7)
|> Map.put("@list", list)
)
# 4.3.8)
Enum.reduce(list_nodes, graph_object, fn (node_id, graph_object) ->
Enum.reduce(list_nodes, graph_object, fn node_id, graph_object ->
Map.delete(graph_object, node_id)
end)
end
end
)
end
defp convert_list(graph_object), do: graph_object
# 4.3.3)
defp extract_list(usage, list \\ [], list_nodes \\ [])
defp extract_list(
%{"node" => %{
%{
"node" =>
%{
# Spec FIXME: no mention of @id
"@id" => id = ("_:" <> _), # contrary to spec we assume/require this to be even on the initial call to be a blank node
# contrary to spec we assume/require this to be even on the initial call to be a blank node
"@id" => id = "_:" <> _,
"usages" => [usage],
@rdf_first => [first],
@rdf_rest => [_rest],
@rdf_rest => [_rest]
} = node,
"property" => @rdf_rest}, list, list_nodes) when map_size(node) == 4 do
"property" => @rdf_rest
},
list,
list_nodes
)
when map_size(node) == 4 do
extract_list(usage, [first | list], [id | list_nodes])
end
defp extract_list(
%{"node" => %{
%{
"node" =>
%{
# Spec FIXME: no mention of @id
"@id" => id = ("_:" <> _), # contrary to spec we assume/require this to be even on the initial call to be a blank node
# contrary to spec we assume/require this to be even on the initial call to be a blank node
"@id" => id = "_:" <> _,
"@type" => [@rdf_list],
"usages" => [usage],
@rdf_first => [first],
@rdf_rest => [_rest],
@rdf_rest => [_rest]
} = node,
"property" => @rdf_rest}, list, list_nodes) when map_size(node) == 5 do
"property" => @rdf_rest
},
list,
list_nodes
)
when map_size(node) == 5 do
extract_list(usage, [first | list], [id | list_nodes])
end
defp extract_list(%{"node" => %{"@id" => subject}, "property" => property, "value" => head},
list, list_nodes),
defp extract_list(
%{"node" => %{"@id" => subject}, "property" => property, "value" => head},
list,
list_nodes
),
do: {list, list_nodes, [subject, property], head}
defp rdf_to_object(%IRI{} = iri, _use_native_types) do
%{"@id" => to_string(iri)}
end
@ -283,23 +327,27 @@ defmodule JSON.LD.Encoder do
value = Literal.value(literal)
converted_value = literal
type = nil
{converted_value, type, result} =
if use_native_types do
cond do
datatype == XSD.String ->
{value, type, result}
datatype == XSD.Boolean ->
if RDF.XSD.Boolean.valid?(literal) do
{value, type, result}
else
{converted_value, NS.XSD.boolean, result}
{converted_value, NS.XSD.boolean(), result}
end
datatype in [XSD.Integer, XSD.Double] ->
if Literal.valid?(literal) do
{value, type, result}
else
{converted_value, type, result}
end
true ->
{converted_value, Literal.datatype_id(literal), result}
end
@ -307,18 +355,23 @@ defmodule JSON.LD.Encoder do
cond do
datatype == RDF.LangString ->
{converted_value, type, Map.put(result, "@language", Literal.language(literal))}
datatype == XSD.String ->
{converted_value, type, result}
true ->
{Literal.lexical(literal), Literal.datatype_id(literal), result}
end
end
result = type && Map.put(result, "@type", to_string(type)) || result
Map.put(result, "@value",
match?(%Literal{}, converted_value) && Literal.lexical(converted_value) || converted_value)
end
result = (type && Map.put(result, "@type", to_string(type))) || result
Map.put(
result,
"@value",
(match?(%Literal{}, converted_value) && Literal.lexical(converted_value)) || converted_value
)
end
defp encode_json(value, opts) do
Jason.encode(value, opts)

View File

@ -187,7 +187,6 @@ defmodule JSON.LD.InvalidLanguageTaggedStringError do
defexception code: "invalid language-tagged string", message: nil
end
defmodule JSON.LD.InvalidLanguageTaggedValueError do
@moduledoc """
A number, true, or false with an associated language tag was detected.

View File

@ -3,17 +3,17 @@ defmodule JSON.LD.Expansion do
import JSON.LD.{IRIExpansion, Utils}
def expand(input, options \\ %JSON.LD.Options{}) do
with options = JSON.LD.Options.new(options),
active_context = JSON.LD.Context.new(options)
do
active_context = JSON.LD.Context.new(options) do
active_context =
case options.expand_context do
%{"@context" => context} ->
JSON.LD.Context.update(active_context, context)
%{} = context ->
JSON.LD.Context.update(active_context, context)
nil ->
active_context
end
@ -21,11 +21,15 @@ defmodule JSON.LD.Expansion do
case do_expand(active_context, nil, input, options) do
result = %{"@graph" => graph} when map_size(result) == 1 ->
graph
nil ->
[]
result when not is_list(result) ->
[result]
result -> result
result ->
result
end
end
end
@ -50,17 +54,25 @@ defmodule JSON.LD.Expansion do
when is_list(element) do
term_def = active_context.term_defs[active_property]
container_mapping = term_def && term_def.container_mapping
element
|> Enum.reduce([], fn (item, result) ->
|> Enum.reduce([], fn item, result ->
expanded_item = do_expand(active_context, active_property, item, options)
if (active_property == "@list" or container_mapping == "@list") and
(is_list(expanded_item) or Map.has_key?(expanded_item, "@list")),
do: raise JSON.LD.ListOfListsError,
message: "List of lists in #{inspect element}"
do:
raise(JSON.LD.ListOfListsError,
message: "List of lists in #{inspect(element)}"
)
case expanded_item do
nil -> result
nil ->
result
list when is_list(list) ->
result ++ list
expanded_item ->
result ++ [expanded_item]
end
@ -77,44 +89,71 @@ defmodule JSON.LD.Expansion do
else
active_context
end
# 6) and 7)
result = element
|> Enum.sort_by(fn {key, _} -> key end)
|> Enum.reduce(%{}, fn ({key, value}, result) ->
if key != "@context" do # 7.1)
expanded_property = expand_iri(key, active_context, false, true)
if expanded_property && # 7.2)
(String.contains?(expanded_property, ":") || JSON.LD.keyword?(expanded_property)) do # 7.3)
if JSON.LD.keyword?(expanded_property) do # 7.4)
if active_property == "@reverse", # 7.4.1)
do: raise JSON.LD.InvalidReversePropertyMapError,
message: "An invalid reverse property map has been detected. No keywords apart from @context are allowed in reverse property maps."
if Map.has_key?(result, expanded_property), # 7.4.2)
do: raise JSON.LD.CollidingKeywordsError,
message: "Two properties which expand to the same keyword have been detected. This might occur if a keyword and an alias thereof are used at the same time."
expanded_value = case expanded_property do
"@id" -> # 7.4.3)
# 6) and 7)
result =
element
|> Enum.sort_by(fn {key, _} -> key end)
|> Enum.reduce(%{}, fn {key, value}, result ->
# 7.1)
if key != "@context" do
expanded_property = expand_iri(key, active_context, false, true)
# 7.2)
# 7.3)
if expanded_property &&
(String.contains?(expanded_property, ":") || JSON.LD.keyword?(expanded_property)) do
# 7.4)
# expanded_property is not a keyword
if JSON.LD.keyword?(expanded_property) do
# 7.4.1)
if active_property == "@reverse",
do:
raise(JSON.LD.InvalidReversePropertyMapError,
message:
"An invalid reverse property map has been detected. No keywords apart from @context are allowed in reverse property maps."
)
# 7.4.2)
if Map.has_key?(result, expanded_property),
do:
raise(JSON.LD.CollidingKeywordsError,
message:
"Two properties which expand to the same keyword have been detected. This might occur if a keyword and an alias thereof are used at the same time."
)
expanded_value =
case expanded_property do
# 7.4.3)
"@id" ->
if is_binary(value) do
expand_iri(value, active_context, true)
else
raise JSON.LD.InvalidIdValueError,
message: "#{inspect value} is not a valid @id value"
message: "#{inspect(value)} is not a valid @id value"
end
"@type" -> # 7.4.4)
# 7.4.4)
"@type" ->
cond do
is_binary(value) ->
expand_iri(value, active_context, true, true)
is_list(value) and Enum.all?(value, &is_binary/1) ->
Enum.map value, fn item ->
expand_iri(item, active_context, true, true) end
Enum.map(value, fn item ->
expand_iri(item, active_context, true, true)
end)
true ->
raise JSON.LD.InvalidTypeValueError,
message: "#{inspect value} is not a valid @type value"
message: "#{inspect(value)} is not a valid @type value"
end
"@graph" -> # 7.4.5)
# 7.4.5)
"@graph" ->
do_expand(active_context, "@graph", value, options)
"@value" -> # 7.4.6)
# 7.4.6)
"@value" ->
if scalar?(value) or is_nil(value) do
if is_nil(value) do
{:skip, Map.put(result, "@value", nil)}
@ -123,172 +162,237 @@ defmodule JSON.LD.Expansion do
end
else
raise JSON.LD.InvalidValueObjectValueError,
message: "#{inspect value} is not a valid value for the @value member of a value object; neither a scalar nor null"
message:
"#{inspect(value)} is not a valid value for the @value member of a value object; neither a scalar nor null"
end
"@language" -> # 7.4.7)
# 7.4.7)
"@language" ->
if is_binary(value),
do: String.downcase(value),
else: raise JSON.LD.InvalidLanguageTaggedStringError,
message: "#{inspect value} is not a valid language-tag"
"@index" -> # 7.4.8)
else:
raise(JSON.LD.InvalidLanguageTaggedStringError,
message: "#{inspect(value)} is not a valid language-tag"
)
# 7.4.8)
"@index" ->
if is_binary(value),
do: value,
else: raise JSON.LD.InvalidIndexValueError,
message: "#{inspect value} is not a valid @index value"
"@list" -> # 7.4.9)
if active_property in [nil, "@graph"] do # 7.4.9.1)
else:
raise(JSON.LD.InvalidIndexValueError,
message: "#{inspect(value)} is not a valid @index value"
)
# 7.4.9)
"@list" ->
# 7.4.9.1)
if active_property in [nil, "@graph"] do
{:skip, result}
else
value = do_expand(active_context, active_property, value, options)
# Spec FIXME: need to be sure that result is a list [from RDF.rb implementation]
value = if is_list(value),
value =
if is_list(value),
do: value,
else: [value]
# If expanded value is a list object, a list of lists error has been detected and processing is aborted.
# Spec FIXME: Also look at each object if result is a list [from RDF.rb implementation]
if Enum.any?(value, fn v -> Map.has_key?(v, "@list") end),
do: raise JSON.LD.ListOfListsError,
message: "List of lists in #{inspect value}"
do:
raise(JSON.LD.ListOfListsError,
message: "List of lists in #{inspect(value)}"
)
value
end
"@set" -> # 7.4.10)
# 7.4.10)
"@set" ->
do_expand(active_context, active_property, value, options)
"@reverse" -> # 7.4.11)
# 7.4.11)
"@reverse" ->
unless is_map(value),
do: raise JSON.LD.InvalidReverseValueError,
message: "#{inspect value} is not a valid @reverse value"
expanded_value = do_expand(active_context, "@reverse", value, options) # 7.4.11.1)
do:
raise(JSON.LD.InvalidReverseValueError,
message: "#{inspect(value)} is not a valid @reverse value"
)
# 7.4.11.1)
expanded_value = do_expand(active_context, "@reverse", value, options)
# 7.4.11.2) If expanded value contains an @reverse member, i.e., properties that are reversed twice, execute for each of its property and item the following steps:
new_result =
if Map.has_key?(expanded_value, "@reverse") do # 7.4.11.2) If expanded value contains an @reverse member, i.e., properties that are reversed twice, execute for each of its property and item the following steps:
Enum.reduce expanded_value["@reverse"], result,
fn ({property, item}, new_result) ->
items = if is_list(item),
if Map.has_key?(expanded_value, "@reverse") do
Enum.reduce(expanded_value["@reverse"], result, fn {property, item},
new_result ->
items =
if is_list(item),
do: item,
else: [item]
Map.update(new_result, property, items, fn members ->
members ++ items
end)
end
end)
else
result
end
# 7.4.11.3)
new_result =
if Map.keys(expanded_value) != ["@reverse"] do # 7.4.11.3)
if Map.keys(expanded_value) != ["@reverse"] do
reverse_map =
Enum.reduce expanded_value, Map.get(new_result, "@reverse", %{}), fn
({property, items}, reverse_map) when property != "@reverse" ->
Enum.reduce(expanded_value, Map.get(new_result, "@reverse", %{}), fn
{property, items}, reverse_map when property != "@reverse" ->
Enum.each(items, fn item ->
if Map.has_key?(item, "@value") or Map.has_key?(item, "@list"),
do: raise JSON.LD.InvalidReversePropertyValueError,
message: "invalid value for a reverse property in #{inspect item}"
do:
raise(JSON.LD.InvalidReversePropertyValueError,
message:
"invalid value for a reverse property in #{inspect(item)}"
)
end)
Map.update(reverse_map, property, items, fn members ->
members ++ items
end)
(_, reverse_map) -> reverse_map
end
_, reverse_map ->
reverse_map
end)
Map.put(new_result, "@reverse", reverse_map)
else
new_result
end
{:skip, new_result}
_ ->
nil
end
# 7.4.12)
case expanded_value do
nil ->
result
{:skip, new_result} ->
new_result
expanded_value ->
Map.put(result, expanded_property, expanded_value)
end
else # expanded_property is not a keyword
else
term_def = active_context.term_defs[key]
expanded_value = cond do
expanded_value =
cond do
# 7.5) Otherwise, if key's container mapping in active context is @language and value is a JSON object then value is expanded from a language map as follows:
is_map(value) && term_def && term_def.container_mapping == "@language" ->
value
|> Enum.sort_by(fn {language, _} -> language end)
|> Enum.reduce([], fn ({language, language_value}, language_map_result) ->
language_map_result ++ (
if(is_list(language_value),
|> Enum.reduce([], fn {language, language_value}, language_map_result ->
language_map_result ++
(if(is_list(language_value),
do: language_value,
else: [language_value])
else: [language_value]
)
|> Enum.map(fn
item when is_binary(item) ->
%{
"@value" => item,
"@language" => String.downcase(language)
}
item ->
raise JSON.LD.InvalidLanguageMapValueError,
message: "#{inspect item} is not a valid language map value"
message: "#{inspect(item)} is not a valid language map value"
end))
end)
end)
)
end)
# 7.6)
is_map(value) && term_def && term_def.container_mapping == "@index" ->
value
|> Enum.sort_by(fn {index, _} -> index end)
|> Enum.reduce([], fn ({index, index_value}, index_map_result) ->
index_map_result ++ (
index_value = if(is_list(index_value),
|> Enum.reduce([], fn {index, index_value}, index_map_result ->
index_map_result ++
(
index_value =
if(is_list(index_value),
do: index_value,
else: [index_value])
else: [index_value]
)
index_value = do_expand(active_context, key, index_value, options)
Enum.map(index_value, fn item ->
Map.put_new(item, "@index", index)
end)
)
end)
# 7.7)
true ->
do_expand(active_context, key, value, options)
end
# 7.8)
if is_nil(expanded_value) do
result
else
# 7.9)
expanded_value =
if (term_def && term_def.container_mapping == "@list") &&
if term_def && term_def.container_mapping == "@list" &&
!(is_map(expanded_value) && Map.has_key?(expanded_value, "@list")) do
%{"@list" =>
(if is_list(expanded_value),
%{
"@list" =>
if(is_list(expanded_value),
do: expanded_value,
else: [expanded_value])}
else: [expanded_value]
)
}
else
expanded_value
end
# 7.10) Otherwise, if the term definition associated to key indicates that it is a reverse property
# Spec FIXME: this is not an otherwise [from RDF.rb implementation]
# 7.11)
if term_def && term_def.reverse_property do
reverse_map = Map.get(result, "@reverse", %{})
reverse_map =
if(is_list(expanded_value),
do: expanded_value,
else: [expanded_value])
|> Enum.reduce(reverse_map, fn (item, reverse_map) ->
else: [expanded_value]
)
|> Enum.reduce(reverse_map, fn item, reverse_map ->
if Map.has_key?(item, "@value") or Map.has_key?(item, "@list"),
do: raise JSON.LD.InvalidReversePropertyValueError,
message: "invalid value for a reverse property in #{inspect item}"
Map.update reverse_map, expanded_property, [item], fn members ->
do:
raise(JSON.LD.InvalidReversePropertyValueError,
message: "invalid value for a reverse property in #{inspect(item)}"
)
Map.update(reverse_map, expanded_property, [item], fn members ->
members ++ [item]
end
end)
end)
Map.put(result, "@reverse", reverse_map)
else # 7.11)
expanded_value = if is_list(expanded_value),
else
expanded_value =
if is_list(expanded_value),
do: expanded_value,
else: [expanded_value]
Map.update result, expanded_property, expanded_value,
fn values -> expanded_value ++ values end
Map.update(result, expanded_property, expanded_value, fn values ->
expanded_value ++ values
end)
end
end
end
@ -300,90 +404,118 @@ defmodule JSON.LD.Expansion do
end
end)
result = case result do
result =
case result do
# 8)
%{"@value" => value} ->
with keys = Map.keys(result) do # 8.1)
# 8.1)
with keys = Map.keys(result) do
if Enum.any?(keys, &(&1 not in ~w[@value @language @type @index])) ||
("@language" in keys and "@type" in keys) do
raise JSON.LD.InvalidValueObjectError,
message: "value object with disallowed members"
end
end
cond do
value == nil -> nil # 8.2)
!is_binary(value) and Map.has_key?(result, "@language") -> # 8.3)
# 8.2)
value == nil ->
nil
# 8.3)
!is_binary(value) and Map.has_key?(result, "@language") ->
raise JSON.LD.InvalidLanguageTaggedValueError,
message: "@value '#{inspect value}' is tagged with a language"
(type = result["@type"]) && !RDF.uri?(type) -> # 8.4)
message: "@value '#{inspect(value)}' is tagged with a language"
# 8.4)
(type = result["@type"]) && !RDF.uri?(type) ->
raise JSON.LD.InvalidTypedValueError,
message: "@value '#{inspect value}' has invalid type #{inspect type}"
true -> result
message: "@value '#{inspect(value)}' has invalid type #{inspect(type)}"
true ->
result
end
# 9)
%{"@type" => type} when not is_list(type) ->
Map.put(result, "@type", [type])
# 10)
%{"@set" => set} ->
validate_set_or_list_object(result)
set
%{"@list" => _} ->
validate_set_or_list_object(result)
result
_ -> result
_ ->
result
end
# 11) If result contains only the key @language, set result to null.
result = if is_map(result) and map_size(result) == 1 and Map.has_key?(result, "@language"),
do: nil, else: result
result =
if is_map(result) and map_size(result) == 1 and Map.has_key?(result, "@language"),
do: nil,
else: result
# 12) If active property is null or @graph, drop free-floating values as follows:
# Spec FIXME: Due to case 10) we might land with a list here; other implementations deal with that, by just returning in step 10)
result = if is_map(result) and active_property in [nil, "@graph"] and (
Enum.empty?(result) or
result =
if is_map(result) and active_property in [nil, "@graph"] and
(Enum.empty?(result) or
Map.has_key?(result, "@value") or Map.has_key?(result, "@list") or
(map_size(result) == 1 and Map.has_key?(result, "@id"))),
do: nil, else: result
do: nil,
else: result
result
end
defp validate_set_or_list_object(object) when map_size(object) == 1, do: true
defp validate_set_or_list_object(object = %{"@index" => _})
when map_size(object) == 2, do: true
when map_size(object) == 2,
do: true
defp validate_set_or_list_object(object) do
raise JSON.LD.InvalidSetOrListObjectError,
message: "set or list object with disallowed members: #{inspect object}"
message: "set or list object with disallowed members: #{inspect(object)}"
end
@doc """
Details at <http://json-ld.org/spec/latest/json-ld-api/#value-expansion>
"""
def expand_value(active_context, active_property, value) do
with term_def = Map.get(active_context.term_defs, active_property,
%JSON.LD.Context.TermDefinition{}) do
with term_def =
Map.get(active_context.term_defs, active_property, %JSON.LD.Context.TermDefinition{}) do
cond do
term_def.type_mapping == "@id" ->
%{"@id" => expand_iri(value, active_context, true, false)}
term_def.type_mapping == "@vocab" ->
%{"@id" => expand_iri(value, active_context, true, true)}
type_mapping = term_def.type_mapping ->
%{"@value" => value, "@type" => type_mapping}
is_binary(value) ->
language_mapping = term_def.language_mapping
cond do
language_mapping ->
%{"@value" => value, "@language" => language_mapping}
language_mapping == false && active_context.default_language ->
%{"@value" => value, "@language" => active_context.default_language}
true ->
%{"@value" => value}
end
true ->
%{"@value" => value}
end
end
end
true ->
%{"@value" => value}
end
end
end
end

View File

@ -4,16 +4,16 @@ defmodule JSON.LD.Flattening do
import JSON.LD.{NodeIdentifierMap, Utils}
alias JSON.LD.NodeIdentifierMap
def flatten(input, context \\ nil, options \\ %JSON.LD.Options{}) do
with options = JSON.LD.Options.new(options),
expanded = JSON.LD.expand(input, options),
node_map = node_map(expanded)
do
node_map = node_map(expanded) do
default_graph =
Enum.reduce node_map, node_map["@default"], fn
({"@default", _}, default_graph) -> default_graph
({graph_name, graph}, default_graph) ->
Enum.reduce(node_map, node_map["@default"], fn
{"@default", _}, default_graph ->
default_graph
{graph_name, graph}, default_graph ->
entry =
if Map.has_key?(default_graph, graph_name) do
default_graph[graph_name]
@ -24,31 +24,32 @@ defmodule JSON.LD.Flattening do
graph_entry =
graph
|> Stream.reject(fn {_, node} ->
Map.has_key?(node, "@id") and map_size(node) == 1 end)
Map.has_key?(node, "@id") and map_size(node) == 1
end)
|> Enum.sort_by(fn {id, _} -> id end)
# TODO: Spec fixme: Spec doesn't handle the case, when a "@graph" member already exists
|> Enum.reduce(Map.get(entry, "@graph", []), fn ({_, node}, graph_entry) ->
|> Enum.reduce(Map.get(entry, "@graph", []), fn {_, node}, graph_entry ->
[node | graph_entry]
end)
|> Enum.reverse
|> Enum.reverse()
Map.put(default_graph, graph_name,
Map.put(entry, "@graph", graph_entry))
end
Map.put(default_graph, graph_name, Map.put(entry, "@graph", graph_entry))
end)
flattened =
default_graph
|> Enum.sort_by(fn {id, _} -> id end)
|> Enum.reduce([], fn ({_, node}, flattened) ->
|> Enum.reduce([], fn {_, node}, flattened ->
if not (Enum.count(node) == 1 and Map.has_key?(node, "@id")) do
[node | flattened]
else
flattened
end
end)
|> Enum.reverse
|> Enum.reverse()
if context && !Enum.empty?(flattened) do # TODO: Spec fixme: !Enum.empty?(flattened) is not in the spec, but in other implementations (Ruby, Java, Go, ...)
# TODO: Spec fixme: !Enum.empty?(flattened) is not in the spec, but in other implementations (Ruby, Java, Go, ...)
if context && !Enum.empty?(flattened) do
JSON.LD.compact(flattened, context, options)
else
flattened
@ -59,7 +60,8 @@ defmodule JSON.LD.Flattening do
def node_map(input, node_id_map \\ nil)
def node_map(input, nil) do
{:ok, node_id_map} = NodeIdentifierMap.start_link
{:ok, node_id_map} = NodeIdentifierMap.start_link()
try do
node_map(input, node_id_map)
after
@ -76,30 +78,59 @@ defmodule JSON.LD.Flattening do
Details at <https://www.w3.org/TR/json-ld-api/#node-map-generation>
"""
def generate_node_map(element, node_map, node_id_map, active_graph \\ "@default",
active_subject \\ nil, active_property \\ nil, list \\ nil)
def generate_node_map(
element,
node_map,
node_id_map,
active_graph \\ "@default",
active_subject \\ nil,
active_property \\ nil,
list \\ nil
)
# 1)
def generate_node_map(element, node_map, node_id_map, active_graph, active_subject,
active_property, list) when is_list(element) do
Enum.reduce element, node_map, fn (item, node_map) ->
generate_node_map(item, node_map, node_id_map, active_graph, active_subject,
active_property, list)
def generate_node_map(
element,
node_map,
node_id_map,
active_graph,
active_subject,
active_property,
list
)
when is_list(element) do
Enum.reduce(element, node_map, fn item, node_map ->
generate_node_map(
item,
node_map,
node_id_map,
active_graph,
active_subject,
active_property,
list
)
end)
end
end
# 2)
def generate_node_map(element, node_map, node_id_map, active_graph, active_subject,
active_property, list) when is_map(element) do
def generate_node_map(
element,
node_map,
node_id_map,
active_graph,
active_subject,
active_property,
list
)
when is_map(element) do
node_map = Map.put_new(node_map, active_graph, %{})
node = node_map[active_graph][active_subject]
# 3)
element =
if old_types = Map.get(element, "@type") do
new_types = Enum.reduce(List.wrap(old_types), [],
fn (item, types) ->
new_types =
Enum.reduce(List.wrap(old_types), [], fn item, types ->
if blank_node_id?(item) do
identifier = generate_blank_node_id(node_id_map, item)
types ++ [identifier]
@ -107,20 +138,25 @@ defmodule JSON.LD.Flattening do
types ++ [item]
end
end)
Map.put(element, "@type",
if(is_list(old_types), do: new_types, else: List.first(new_types)))
Map.put(
element,
"@type",
if(is_list(old_types), do: new_types, else: List.first(new_types))
)
else
element
end
cond do
# 4)
Map.has_key?(element, "@value") ->
if is_nil(list) do
if node do
update_in(node_map, [active_graph, active_subject, active_property], fn
nil -> [element]
nil ->
[element]
items ->
unless element in items,
do: items ++ [element],
@ -137,16 +173,25 @@ defmodule JSON.LD.Flattening do
# 5)
Map.has_key?(element, "@list") ->
{:ok, result_list} = new_list()
{node_map, result} =
try do
{
generate_node_map(element["@list"], node_map, node_id_map,
active_graph, active_subject, active_property, result_list),
generate_node_map(
element["@list"],
node_map,
node_id_map,
active_graph,
active_subject,
active_property,
result_list
),
get_list(result_list)
}
after
terminate_list(result_list)
end
if node do
update_in(node_map, [active_graph, active_subject, active_property], fn
nil -> [result]
@ -160,6 +205,7 @@ defmodule JSON.LD.Flattening do
true ->
# 6.1)
{id, element} = Map.pop(element, "@id")
id =
if id do
if blank_node_id?(id) do
@ -167,6 +213,7 @@ defmodule JSON.LD.Flattening do
else
id
end
# 6.2)
else
generate_blank_node_id(node_id_map)
@ -190,7 +237,9 @@ defmodule JSON.LD.Flattening do
if is_map(active_subject) do
unless Map.has_key?(node, active_property) do
update_in(node_map, [active_graph, id, active_property], fn
nil -> [active_subject]
nil ->
[active_subject]
items ->
unless active_subject in items,
do: items ++ [active_subject],
@ -199,18 +248,23 @@ defmodule JSON.LD.Flattening do
else
node_map
end
# 6.6)
else
unless is_nil(active_property) do
reference = %{"@id" => id}
if is_nil(list) do
update_in(node_map, [active_graph, active_subject, active_property], fn
nil -> [reference]
nil ->
[reference]
items ->
unless reference in items,
do: items ++ [reference],
else: items
end)
# 6.6.3) TODO: Spec fixme: specs says to add ELEMENT to @list member, should be REFERENCE
else
append_to_list(list, reference)
@ -225,15 +279,18 @@ defmodule JSON.LD.Flattening do
{node_map, element} =
if Map.has_key?(element, "@type") do
node_map =
Enum.reduce element["@type"], node_map, fn (type, node_map) ->
Enum.reduce(element["@type"], node_map, fn type, node_map ->
update_in(node_map, [active_graph, id, "@type"], fn
nil -> [type]
nil ->
[type]
items ->
unless type in items,
do: items ++ [type],
else: items
end)
end
end)
element = Map.delete(element, "@type")
{node_map, element}
else
@ -244,6 +301,7 @@ defmodule JSON.LD.Flattening do
{node_map, element} =
if Map.has_key?(element, "@index") do
{element_index, element} = Map.pop(element, "@index")
node_map =
if node_index = get_in(node_map, [active_graph, id, "@index"]) do
if not deep_compare(node_index, element_index) do
@ -251,10 +309,11 @@ defmodule JSON.LD.Flattening do
message: "Multiple conflicting indexes have been found for the same node."
end
else
update_in node_map, [active_graph, id], fn node ->
update_in(node_map, [active_graph, id], fn node ->
Map.put(node, "@index", element_index)
end)
end
end
{node_map, element}
else
{node_map, element}
@ -265,12 +324,21 @@ defmodule JSON.LD.Flattening do
if Map.has_key?(element, "@reverse") do
referenced_node = %{"@id" => id}
{reverse_map, element} = Map.pop(element, "@reverse")
node_map = Enum.reduce reverse_map, node_map, fn ({property, values}, node_map) ->
Enum.reduce values, node_map, fn (value, node_map) ->
generate_node_map(value, node_map, node_id_map, active_graph,
referenced_node, property)
end
end
node_map =
Enum.reduce(reverse_map, node_map, fn {property, values}, node_map ->
Enum.reduce(values, node_map, fn value, node_map ->
generate_node_map(
value,
node_map,
node_id_map,
active_graph,
referenced_node,
property
)
end)
end)
{node_map, element}
else
{node_map, element}
@ -288,21 +356,23 @@ defmodule JSON.LD.Flattening do
# 6.11)
element
|> Enum.sort_by(fn {property, _} -> property end)
|> Enum.reduce(node_map, fn ({property, value}, node_map) ->
|> Enum.reduce(node_map, fn {property, value}, node_map ->
property =
if blank_node_id?(property) do
generate_blank_node_id(node_id_map, property)
else
property
end
node_map =
unless Map.has_key?(node_map[active_graph][id], property) do
update_in node_map, [active_graph, id], fn node ->
update_in(node_map, [active_graph, id], fn node ->
Map.put(node, property, [])
end
end)
else
node_map
end
generate_node_map(value, node_map, node_id_map, active_graph, id, property)
end)
end
@ -314,29 +384,29 @@ defmodule JSON.LD.Flattening do
Map.has_key?(v2, k) && deep_compare(v, v2[k])
end)
end
defp deep_compare(v1, v2) when is_list(v1) and is_list(v2) do
Enum.count(v1) == Enum.count(v2) && MapSet.new(v1) == MapSet.new(v2)
end
defp deep_compare(v, v), do: true
defp deep_compare(_, _), do: false
defp new_list do
Agent.start_link fn -> %{"@list" => []} end
Agent.start_link(fn -> %{"@list" => []} end)
end
defp terminate_list(pid) do
Agent.stop pid
Agent.stop(pid)
end
defp get_list(pid) do
Agent.get pid, fn list_node -> list_node end
Agent.get(pid, fn list_node -> list_node end)
end
defp append_to_list(pid, element) do
Agent.update pid, fn list_node ->
Agent.update(pid, fn list_node ->
Map.update(list_node, "@list", [element], fn list -> list ++ [element] end)
end)
end
end
end

View File

@ -1,14 +1,20 @@
defmodule JSON.LD.IRIExpansion do
import JSON.LD.Utils
@keywords JSON.LD.keywords # to allow this to be used in function guard clauses, we redefine this here
# to allow this to be used in function guard clauses, we redefine this here
@keywords JSON.LD.keywords()
@doc """
see http://json-ld.org/spec/latest/json-ld-api/#iri-expansion
"""
def expand_iri(value, active_context, doc_relative \\ false, vocab \\ false,
local_context \\ nil, defined \\ nil)
def expand_iri(
value,
active_context,
doc_relative \\ false,
vocab \\ false,
local_context \\ nil,
defined \\ nil
)
# 1) If value is a keyword or null, return value as is.
def expand_iri(value, active_context, _, _, local_context, defined)
@ -25,8 +31,14 @@ defmodule JSON.LD.IRIExpansion do
{active_context, defined} =
if local_context && local_context[value] && defined[value] != true do
local_def = local_context[value]
JSON.LD.Context.create_term_definition(
active_context, local_context, value, local_def, defined)
active_context,
local_context,
value,
local_def,
defined
)
else
{active_context, defined}
end
@ -37,6 +49,7 @@ defmodule JSON.LD.IRIExpansion do
vocab && Map.has_key?(active_context.term_defs, value) ->
result = (term_def = active_context.term_defs[value]) && term_def.iri_mapping
{result, active_context, defined}
# 4) If value contains a colon (:), it is either an absolute IRI, a compact IRI, or a blank node identifier
String.contains?(value, ":") ->
case compact_iri_parts(value) do
@ -45,33 +58,47 @@ defmodule JSON.LD.IRIExpansion do
{active_context, defined} =
if local_context && local_context[prefix] && defined[prefix] != true do
local_def = local_context[prefix]
JSON.LD.Context.create_term_definition(
active_context, local_context, prefix, local_def, defined)
active_context,
local_context,
prefix,
local_def,
defined
)
else
{active_context, defined}
end
# 4.4)
result =
if prefix_def = active_context.term_defs[prefix] do
prefix_def.iri_mapping <> suffix
else
value # 4.5)
# 4.5)
value
end
{result, active_context, defined}
nil ->
{value, active_context, defined} # 4.2)
# 4.2)
{value, active_context, defined}
end
# 5) If vocab is true, and active context has a vocabulary mapping, return the result of concatenating the vocabulary mapping with value.
vocab && active_context.vocab ->
vocabulary_mapping = active_context.vocab
{vocabulary_mapping <> value, active_context, defined}
# 6) Otherwise, if document relative is true, set value to the result of resolving value against the base IRI. Only the basic algorithm in section 5.2 of [RFC3986] is used; neither Syntax-Based Normalization nor Scheme-Based Normalization are performed. Characters additionally allowed in IRI references are treated in the same way that unreserved characters are treated in URI references, per section 6.5 of [RFC3987].
doc_relative ->
{absolute_iri(value, JSON.LD.Context.base(active_context)), active_context, defined}
# TODO: RDF.rb's implementation differs from the spec here, by checking if base_iri is actually present in the previous clause and adding the following additional clause. Another Spec error?
# if local_context && RDF::URI(value).relative?
# # If local context is not null and value is not an absolute IRI, an invalid IRI mapping error has been detected and processing is aborted.
# raise JSON.LD.InvalidIRIMappingError, message: "not an absolute IRI: #{value}"
# TODO: RDF.rb's implementation differs from the spec here, by checking if base_iri is actually present in the previous clause and adding the following additional clause. Another Spec error?
# if local_context && RDF::URI(value).relative?
# # If local context is not null and value is not an absolute IRI, an invalid IRI mapping error has been detected and processing is aborted.
# raise JSON.LD.InvalidIRIMappingError, message: "not an absolute IRI: #{value}"
# 7) Return value as is.
true ->
{value, active_context, defined}
@ -83,5 +110,4 @@ defmodule JSON.LD.IRIExpansion do
result
end
end
end

View File

@ -22,7 +22,6 @@ defmodule JSON.LD.NodeIdentifierMap do
GenServer.call(pid, {:generate_id, identifier})
end
# Server Callbacks
def init(:ok) do
@ -34,7 +33,9 @@ defmodule JSON.LD.NodeIdentifierMap do
{:reply, map[identifier], state}
else
blank_node_id = "_:b#{counter}"
{:reply, blank_node_id, %{
{:reply, blank_node_id,
%{
counter: counter + 1,
map:
if identifier do
@ -45,5 +46,4 @@ defmodule JSON.LD.NodeIdentifierMap do
}}
end
end
end

View File

@ -17,5 +17,4 @@ defmodule JSON.LD.Options do
def new(), do: %JSON.LD.Options{}
def new(%JSON.LD.Options{} = options), do: options
def new(options), do: struct(JSON.LD.Options, options)
end

View File

@ -1,5 +1,4 @@
defmodule JSON.LD.Utils do
alias RDF.IRI
@doc """
@ -16,24 +15,23 @@ defmodule JSON.LD.Utils do
def absolute_iri(value, nil),
do: value
def absolute_iri(value, base_iri),
do: value |> RDF.IRI.absolute(base_iri) |> to_string
def relative_iri?(value),
do: not (JSON.LD.keyword?(value) or IRI.absolute?(value) or blank_node_id?(value))
def compact_iri_parts(compact_iri, exclude_bnode \\ true) do
with [prefix, suffix] <- String.split(compact_iri, ":", parts: 2) do
if not(String.starts_with?(suffix, "//")) and
not(exclude_bnode and prefix == "_"),
if not String.starts_with?(suffix, "//") and
not (exclude_bnode and prefix == "_"),
do: [prefix, suffix]
else
_ -> nil
end
end
@doc """
Checks if the given value is a blank node identifier.
@ -47,9 +45,11 @@ defmodule JSON.LD.Utils do
def blank_node_id?("_:" <> _), do: true
def blank_node_id?(_), do: false
def scalar?(value)
when is_binary(value) or is_number(value) or
is_boolean(value),
do: true
def scalar?(value) when is_binary(value) or is_number(value) or
is_boolean(value), do: true
def scalar?(_), do: false
def list?(%{"@list" => _}), do: true
@ -58,5 +58,4 @@ defmodule JSON.LD.Utils do
def index?(_), do: false
def value?(%{"@value" => _}), do: true
def value?(_), do: false
end

View File

@ -1,5 +1,4 @@
defmodule JSON.LD do
use RDF.Serialization.Format
import RDF.Sigils
@ -9,7 +8,7 @@ defmodule JSON.LD do
@extension "jsonld"
@media_type "application/ld+json"
def options, do: JSON.LD.Options.new
def options, do: JSON.LD.Options.new()
@keywords ~w[
@base
@ -42,7 +41,6 @@ defmodule JSON.LD do
def keyword?(value) when is_binary(value) and value in @keywords, do: true
def keyword?(_value), do: false
@doc """
Expands the given input according to the steps in the JSON-LD Expansion Algorithm.
@ -57,7 +55,6 @@ defmodule JSON.LD do
defdelegate expand(input, options \\ %JSON.LD.Options{}),
to: JSON.LD.Expansion
@doc """
Compacts the given input according to the steps in the JSON-LD Compaction Algorithm.
@ -74,7 +71,6 @@ defmodule JSON.LD do
defdelegate compact(input, context, options \\ %JSON.LD.Options{}),
to: JSON.LD.Compaction
@doc """
Flattens the given input according to the steps in the JSON-LD Flattening Algorithm.
@ -90,7 +86,6 @@ defmodule JSON.LD do
defdelegate flatten(input, context \\ nil, options \\ %JSON.LD.Options{}),
to: JSON.LD.Flattening
@doc """
Generator function for `JSON.LD.Context`s.
@ -105,11 +100,9 @@ defmodule JSON.LD do
def context(context, options),
do: JSON.LD.Context.create(%{"@context" => context}, options)
@doc """
Generator function for JSON-LD node maps.
"""
def node_map(input, node_id_map \\ nil),
do: JSON.LD.Flattening.node_map(input, node_id_map)
end

14
mix.exs
View File

@ -3,15 +3,15 @@ defmodule JSON.LD.Mixfile do
@repo_url "https://github.com/rdf-elixir/jsonld-ex"
@version File.read!("VERSION") |> String.trim
@version File.read!("VERSION") |> String.trim()
def project do
[
app: :json_ld,
version: @version,
elixir: "~> 1.8",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
deps: deps(),
elixirc_paths: elixirc_paths(Mix.env()),
@ -25,7 +25,7 @@ defmodule JSON.LD.Mixfile do
main: "JSON.LD",
source_url: @repo_url,
source_ref: "v#{@version}",
extras: ["README.md"],
extras: ["README.md"]
],
# ExCoveralls
@ -35,7 +35,7 @@ defmodule JSON.LD.Mixfile do
"coveralls.detail": :test,
"coveralls.post": :test,
"coveralls.html": :test
],
]
]
end
@ -63,12 +63,12 @@ defmodule JSON.LD.Mixfile do
{:rdf, "~> 0.8"},
{:jason, "~> 1.2"},
{:httpoison, "~> 1.7"},
{:credo, "~> 1.4", only: [:dev, :test], runtime: false},
{:dialyxir, "~> 1.0", only: :dev, runtime: false},
{:ex_doc, "~> 0.22", only: :dev, runtime: false},
{:bypass, "~> 1.0", only: :test},
{:plug_cowboy, "~> 1.0", only: :test}, # in order to run under OTP 21 we need to keep this dependency of bypass on 1.0
# in order to run under OTP 21 we need to keep this dependency of bypass on 1.0
{:plug_cowboy, "~> 1.0", only: :test},
{:excoveralls, "~> 0.13", only: :test}
]
end

View File

@ -13,9 +13,12 @@ defmodule JSON.LD.TestSuite.CompactTest do
@tag :compact_test_suite
@tag data: test_case
test "#{input}: #{name}",
%{data: %{"input" => input, "expect" => output, "context" => context} = test_case, base_iri: base_iri} do
assert JSON.LD.compact(j(input), j(context), test_case_options(test_case, base_iri)) == j(output)
%{
data: %{"input" => input, "expect" => output, "context" => context} = test_case,
base_iri: base_iri
} do
assert JSON.LD.compact(j(input), j(context), test_case_options(test_case, base_iri)) ==
j(output)
end
end)
end

View File

@ -24,8 +24,6 @@ defmodule JSON.LD.TestSuite.ErrorTest do
assert_raise exception(error), fn ->
JSON.LD.flatten(j(input), context, test_case_options(test_case, base_iri))
end
end
end)
end

View File

@ -8,14 +8,15 @@ defmodule JSON.LD.TestSuite.ExpandTest do
end
test_cases("expand")
# TODO: Ordering problems
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0034] end)
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0035] end)
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0038] end)
# TODO: Ordering problems
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0034] end)
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0035] end)
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0038] end)
|> Enum.each(fn %{"name" => name, "input" => input} = test_case ->
if input in ~w[expand-0034-in.jsonld expand-0035-in.jsonld expand-0038-in.jsonld] do
@tag skip: "TODO: Actually correct values are expanded, but the ordering is different."
end
@tag :test_suite
@tag :expand_test_suite
@tag data: test_case
@ -24,5 +25,4 @@ defmodule JSON.LD.TestSuite.ExpandTest do
assert JSON.LD.expand(j(input), test_case_options(test_case, base_iri)) == j(output)
end
end)
end

View File

@ -8,14 +8,15 @@ defmodule JSON.LD.TestSuite.FlattenTest do
end
test_cases("flatten")
# TODO: Ordering problems
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0034] end)
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0035] end)
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0038] end)
# TODO: Ordering problems
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0034] end)
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0035] end)
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0038] end)
|> Enum.each(fn %{"name" => name, "input" => input} = test_case ->
if input in ~w[flatten-0034-in.jsonld flatten-0035-in.jsonld flatten-0038-in.jsonld] do
@tag skip: "TODO: Actually correct values are expanded, but the ordering is different."
end
@tag :test_suite
@tag :flatten_test_suite
@tag data: test_case
@ -26,8 +27,9 @@ defmodule JSON.LD.TestSuite.FlattenTest do
nil -> nil
context -> j(context)
end
assert JSON.LD.flatten(j(input), context, test_case_options(test_case, base_iri)) == j(output)
assert JSON.LD.flatten(j(input), context, test_case_options(test_case, base_iri)) ==
j(output)
end
end)
end

View File

@ -3,21 +3,20 @@ defmodule JSON.LD.TestSuite.FromRdfTest do
import JSON.LD.TestSuite
setup_all do
[base_iri: manifest("fromRdf")["baseIri"]]
end
test_cases("fromRdf")
# TODO: https://github.com/json-ld/json-ld.org/issues/357
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0020] end)
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0021] end)
# TODO: Ordering problems
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0001] end)
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0002] end)
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0017] end)
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0018] end)
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0019] end)
# TODO: https://github.com/json-ld/json-ld.org/issues/357
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0020] end)
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0021] end)
# TODO: Ordering problems
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0001] end)
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0002] end)
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0017] end)
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0018] end)
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0019] end)
|> Enum.each(fn %{"name" => name, "input" => input} = test_case ->
if input in ~w[fromRdf-0001-in.nq fromRdf-0002-in.nq fromRdf-0017-in.nq fromRdf-0018-in.nq fromRdf-0019-in.nq] do
@ -26,9 +25,11 @@ defmodule JSON.LD.TestSuite.FromRdfTest do
So, fixing that would require a different representation of graphs in general.
"""
end
if input in ~w[fromRdf-0020-in.nq fromRdf-0021-in.nq] do
@tag skip: "https://github.com/json-ld/json-ld.org/issues/357"
end
@tag :test_suite
@tag :from_rdf_test_suite
@tag data: test_case
@ -41,14 +42,14 @@ defmodule JSON.LD.TestSuite.FromRdfTest do
def serialize(filename, options) do
filename
|> file
|> RDF.NQuads.read_file!
|> RDF.NQuads.read_file!()
|> JSON.LD.Encoder.from_rdf!(options)
end
def json(filename) do
filename
|> file
|> File.read!
|> Jason.decode!
|> File.read!()
|> Jason.decode!()
end
end

View File

@ -1,6 +1,6 @@
# TODO: These tests are disabled as we don't support remote document loading yet.
#defmodule JSON.LD.TestSuite.RemoteDocTest do
# defmodule JSON.LD.TestSuite.RemoteDocTest do
# use ExUnit.Case, async: false
#
# import JSON.LD.TestSuite
@ -54,4 +54,4 @@
# end
# end
# end)
#end
# end

View File

@ -9,8 +9,8 @@ defmodule JSON.LD.TestSuite.ToRdfTest do
end
test_cases("toRdf")
# TODO: Ordering problems
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0118] end)
# TODO: Ordering problems
# |> Enum.filter(fn %{"@id" => id} -> id in ~w[#t0118] end)
|> Enum.each(fn %{"name" => name, "input" => input} = test_case ->
if input in ~w[toRdf-0118-in.jsonld] do
@tag skip: """
@ -18,13 +18,15 @@ defmodule JSON.LD.TestSuite.ToRdfTest do
during expansion the generated blank nodes are named different.
"""
end
@tag :test_suite
@tag :to_rdf_test_suite
@tag data: test_case
test "#{input}: #{name}",
%{data: %{"input" => input, "expect" => output} = test_case, base_iri: base_iri} do
# This requires a special handling, since the N-Quad ouput file is not valid, by using blank nodes as predicates
dataset = if input == "toRdf-0118-in.jsonld",
dataset =
if input == "toRdf-0118-in.jsonld",
do: toRdf_0118_dataset(),
else: RDF.NQuads.read_file!(file(output))
@ -42,7 +44,7 @@ defmodule JSON.LD.TestSuite.ToRdfTest do
{RDF.bnode("b0"), RDF.bnode("b0"), RDF.bnode("b2")},
{RDF.bnode("b0"), RDF.bnode("b0"), RDF.bnode("b3")},
{RDF.bnode("b1"), RDF.bnode("b0"), "term"},
{RDF.bnode("b2"), RDF.bnode("b0"), "termId"},
{RDF.bnode("b2"), RDF.bnode("b0"), "termId"}
])
end
end

View File

@ -1,6 +1,5 @@
defmodule JSON.LD.TestData do
@dir Path.join(File.cwd!, "test/data/")
@dir Path.join(File.cwd!(), "test/data/")
def dir, do: @dir
def file(name) do
@ -10,5 +9,4 @@ defmodule JSON.LD.TestData do
raise "Test data file '#{name}' not found"
end
end
end

View File

@ -1,5 +1,4 @@
defmodule JSON.LD.TestSuite do
@test_suite_dir "json-ld.org-test-suite"
def test_suite_dir, do: @test_suite_dir
@ -43,7 +42,7 @@ defmodule JSON.LD.TestSuite do
|> Map.get("option", %{})
|> Map.put_new("base", base_iri <> test_case["input"])
|> Enum.map(fn {key, value} ->
{key |> Macro.underscore |> String.to_atom, value}
{key |> Macro.underscore() |> String.to_atom(), value}
end)
|> Enum.map(fn
{:expand_context, file} -> {:expand_context, j(file)}
@ -52,13 +51,14 @@ defmodule JSON.LD.TestSuite do
end
def exception(error) do
error = error
error =
error
|> String.replace(" ", "_")
|> String.replace("-", "_")
|> String.replace("@", "_")
|> Macro.camelize
|> Macro.camelize()
|> String.replace("_", "")
String.to_existing_atom("Elixir.JSON.LD.#{error}Error")
end
end

View File

@ -4,7 +4,8 @@ defmodule JSON.LD.CompactionTest do
alias RDF.NS.{RDFS, XSD}
test "Flattened form of a JSON-LD document (EXAMPLES 57-59 of https://www.w3.org/TR/json-ld/#compacted-document-form)" do
input = Jason.decode! """
input =
Jason.decode!("""
[
{
"http://xmlns.com/foaf/0.1/name": [ "Manu Sporny" ],
@ -15,8 +16,10 @@ defmodule JSON.LD.CompactionTest do
]
}
]
"""
context = Jason.decode! """
""")
context =
Jason.decode!("""
{
"@context": {
"name": "http://xmlns.com/foaf/0.1/name",
@ -26,8 +29,10 @@ defmodule JSON.LD.CompactionTest do
}
}
}
"""
assert JSON.LD.compact(input, context) == Jason.decode! """
""")
assert JSON.LD.compact(input, context) ==
Jason.decode!("""
{
"@context": {
"name": "http://xmlns.com/foaf/0.1/name",
@ -39,10 +44,9 @@ defmodule JSON.LD.CompactionTest do
"name": "Manu Sporny",
"homepage": "http://manu.sporny.org/"
}
"""
""")
end
%{
"prefix" => %{
input: %{
@ -111,15 +115,15 @@ defmodule JSON.LD.CompactionTest do
},
"xsd:date coercion" => %{
input: %{
"http://example.com/b" => %{"@value" => "2012-01-04", "@type" => to_string(XSD.date)}
"http://example.com/b" => %{"@value" => "2012-01-04", "@type" => to_string(XSD.date())}
},
context: %{
"xsd" => XSD.__base_iri__,
"xsd" => XSD.__base_iri__(),
"b" => %{"@id" => "http://example.com/b", "@type" => "xsd:date"}
},
output: %{
"@context" => %{
"xsd" => XSD.__base_iri__,
"xsd" => XSD.__base_iri__(),
"b" => %{"@id" => "http://example.com/b", "@type" => "xsd:date"}
},
"b" => "2012-01-04"
@ -138,7 +142,7 @@ defmodule JSON.LD.CompactionTest do
"@list coercion (integer)" => %{
input: %{
"http://example.com/term" => [
%{"@list" => [1]},
%{"@list" => [1]}
]
},
context: %{
@ -150,7 +154,7 @@ defmodule JSON.LD.CompactionTest do
"term4" => %{"@id" => "http://example.com/term", "@container" => "@list"},
"@language" => "de"
},
"term4" => [1],
"term4" => [1]
}
},
"@set coercion" => %{
@ -176,24 +180,24 @@ defmodule JSON.LD.CompactionTest do
"@type with string @id" => %{
input: %{
"@id" => "http://example.com/",
"@type" => (RDFS.Resource |> RDF.uri |> to_string)
"@type" => RDFS.Resource |> RDF.uri() |> to_string
},
context: %{},
output: %{
"@id" => "http://example.com/",
"@type" => (RDFS.Resource |> RDF.uri |> to_string)
},
"@type" => RDFS.Resource |> RDF.uri() |> to_string
}
},
"@type with array @id" => %{
input: %{
"@id" => "http://example.com/",
"@type" => (RDFS.Resource |> RDF.uri |> to_string)
"@type" => RDFS.Resource |> RDF.uri() |> to_string
},
context: %{},
output: %{
"@id" => "http://example.com/",
"@type" => (RDFS.Resource |> RDF.uri |> to_string)
},
"@type" => RDFS.Resource |> RDF.uri() |> to_string
}
},
"default language" => %{
input: %{
@ -211,11 +215,11 @@ defmodule JSON.LD.CompactionTest do
"term5" => %{"@id" => "http://example.com/term", "@language" => nil},
"@language" => "de"
},
"term5" => [ "v5", "plain literal" ]
"term5" => ["v5", "plain literal"]
}
},
}
|> Enum.each(fn ({title, data}) ->
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert JSON.LD.compact(data.input, data.context) == data.output
@ -227,24 +231,24 @@ defmodule JSON.LD.CompactionTest do
"@id" => %{
input: %{
"@id" => "",
"@type" => (RDFS.Resource |> RDF.uri |> to_string)
"@type" => RDFS.Resource |> RDF.uri() |> to_string
},
context: %{"id" => "@id"},
output: %{
"@context" => %{"id" => "@id"},
"id" => "",
"@type" => (RDFS.Resource |> RDF.uri |> to_string)
"@type" => RDFS.Resource |> RDF.uri() |> to_string
}
},
"@type" => %{
input: %{
"@type" => (RDFS.Resource |> RDF.uri |> to_string),
"@type" => RDFS.Resource |> RDF.uri() |> to_string,
"http://example.org/foo" => %{"@value" => "bar", "@type" => "http://example.com/type"}
},
context: %{"type" => "@type"},
output: %{
"@context" => %{"type" => "@type"},
"type" => (RDFS.Resource |> RDF.uri |> to_string),
"type" => RDFS.Resource |> RDF.uri() |> to_string,
"http://example.org/foo" => %{"@value" => "bar", "type" => "http://example.com/type"}
}
},
@ -277,9 +281,9 @@ defmodule JSON.LD.CompactionTest do
"@context" => %{"list" => "@list"},
"http://example.org/foo" => %{"list" => ["bar"]}
}
},
}
|> Enum.each(fn ({title, data}) ->
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert JSON.LD.compact(data.input, data.context) == data.output
@ -290,26 +294,30 @@ defmodule JSON.LD.CompactionTest do
describe "term selection" do
%{
"Uses term with nil language when two terms conflict on language" => %{
input: [%{
input: [
%{
"http://example.com/term" => %{"@value" => "v1"}
}],
}
],
context: %{
"term5" => %{"@id" => "http://example.com/term","@language" => nil},
"term5" => %{"@id" => "http://example.com/term", "@language" => nil},
"@language" => "de"
},
output: %{
"@context" => %{
"term5" => %{"@id" => "http://example.com/term","@language" => nil},
"term5" => %{"@id" => "http://example.com/term", "@language" => nil},
"@language" => "de"
},
"term5" => "v1",
"term5" => "v1"
}
},
"Uses subject alias" => %{
input: [%{
input: [
%{
"@id" => "http://example.com/id1",
"http://example.com/id1" => %{"@value" => "foo", "@language" => "de"}
}],
}
],
context: %{
"id1" => "http://example.com/id1",
"@language" => "de"
@ -324,16 +332,19 @@ defmodule JSON.LD.CompactionTest do
}
},
"compact-0007" => %{
input: Jason.decode!("""
input:
Jason.decode!("""
{"http://example.org/vocab#contains": "this-is-not-an-IRI"}
"""),
context: Jason.decode!("""
context:
Jason.decode!("""
{
"ex": "http://example.org/vocab#",
"ex:contains": {"@type": "@id"}
}
"""),
output: Jason.decode!("""
output:
Jason.decode!("""
{
"@context": {
"ex": "http://example.org/vocab#",
@ -341,10 +352,10 @@ defmodule JSON.LD.CompactionTest do
},
"http://example.org/vocab#contains": "this-is-not-an-IRI"
}
"""),
""")
}
}
|> Enum.each(fn ({title, data}) ->
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert JSON.LD.compact(data.input, data.context) == data.output
@ -355,7 +366,8 @@ defmodule JSON.LD.CompactionTest do
describe "@reverse" do
%{
"compact-0033" => %{
input: Jason.decode!("""
input:
Jason.decode!("""
[
{
"@id": "http://example.com/people/markus",
@ -371,13 +383,15 @@ defmodule JSON.LD.CompactionTest do
}
]
"""),
context: Jason.decode!("""
context:
Jason.decode!("""
{
"name": "http://xmlns.com/foaf/0.1/name",
"isKnownBy": { "@reverse": "http://xmlns.com/foaf/0.1/knows" }
}
"""),
output: Jason.decode!("""
output:
Jason.decode!("""
{
"@context": {
"name": "http://xmlns.com/foaf/0.1/name",
@ -395,7 +409,7 @@ defmodule JSON.LD.CompactionTest do
""")
}
}
|> Enum.each(fn ({title, data}) ->
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert JSON.LD.compact(data.input, data.context) == data.output
@ -408,62 +422,83 @@ defmodule JSON.LD.CompactionTest do
context = %{
"foo" => "http://example.com/"
}
input = %{
"http://example.com/" => "bar"
}
expected = %{
"@context" => %{
"foo" => "http://example.com/"
},
"foo" => "bar"
}
assert JSON.LD.compact(input, context) == expected
end
end
# TODO:
# describe "context as reference" do
# let(:remote_doc) do
# JSON::LD::API::RemoteDocument.new("http://example.com/context", %q({"@context": {"b": "http://example.com/b"}}))
# end
# test "uses referenced context" do
# input = %{
# "http://example.com/b" => "c"
# }
# expected = %{
# "@context" => "http://example.com/context",
# "b" => "c"
# }
# allow(JSON::LD::API).to receive(:documentLoader).with("http://example.com/context", anything).and_yield(remote_doc)
# jld = JSON::LD::API.compact(input, "http://example.com/context", logger: logger, validate: true)
# expect(jld).to produce(expected, logger)
# end
# end
# TODO:
# describe "context as reference" do
# let(:remote_doc) do
# JSON::LD::API::RemoteDocument.new("http://example.com/context", %q({"@context": {"b": "http://example.com/b"}}))
# end
# test "uses referenced context" do
# input = %{
# "http://example.com/b" => "c"
# }
# expected = %{
# "@context" => "http://example.com/context",
# "b" => "c"
# }
# allow(JSON::LD::API).to receive(:documentLoader).with("http://example.com/context", anything).and_yield(remote_doc)
# jld = JSON::LD::API.compact(input, "http://example.com/context", logger: logger, validate: true)
# expect(jld).to produce(expected, logger)
# end
# end
describe "@list" do
%{
"1 term 2 lists 2 languages" => %{
input: [%{
input: [
%{
"http://example.com/foo" => [
%{"@list" => [%{"@value" => "en", "@language" => "en"}]},
%{"@list" => [%{"@value" => "de", "@language" => "de"}]}
]
}],
}
],
context: %{
"foo_en" => %{"@id" => "http://example.com/foo", "@container" => "@list", "@language" => "en"},
"foo_de" => %{"@id" => "http://example.com/foo", "@container" => "@list", "@language" => "de"}
"foo_en" => %{
"@id" => "http://example.com/foo",
"@container" => "@list",
"@language" => "en"
},
"foo_de" => %{
"@id" => "http://example.com/foo",
"@container" => "@list",
"@language" => "de"
}
},
output: %{
"@context" => %{
"foo_en" => %{"@id" => "http://example.com/foo", "@container" => "@list", "@language" => "en"},
"foo_de" => %{"@id" => "http://example.com/foo", "@container" => "@list", "@language" => "de"}
"foo_en" => %{
"@id" => "http://example.com/foo",
"@container" => "@list",
"@language" => "en"
},
"foo_de" => %{
"@id" => "http://example.com/foo",
"@container" => "@list",
"@language" => "de"
}
},
"foo_en" => ["en"],
"foo_de" => ["de"]
}
},
}
|> Enum.each(fn ({title, data}) ->
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert JSON.LD.compact(data.input, data.context) == data.output
@ -499,9 +534,9 @@ defmodule JSON.LD.CompactionTest do
"de" => ["Die Königin", "Ihre Majestät"]
}
}
},
}
|> Enum.each(fn ({title, data}) ->
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert JSON.LD.compact(data.input, data.context) == data.output
@ -524,9 +559,9 @@ defmodule JSON.LD.CompactionTest do
%{"ex:bar" => "bar"}
]
}
},
}
|> Enum.each(fn ({title, data}) ->
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert JSON.LD.compact(data.input, data.context) == data.output
@ -548,14 +583,13 @@ defmodule JSON.LD.CompactionTest do
"http://example.org/foo" => [%{"@list" => ["baz"]}]
},
exception: JSON.LD.ListOfListsError
},
}
|> Enum.each(fn ({title, data}) ->
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert_raise data.exception, fn -> JSON.LD.compact(data.input, %{}) end
end
end)
end
end

View File

@ -32,40 +32,39 @@ defmodule JSON.LD.ContextTest do
end
test "associates @list container mapping with predicate" do
c = JSON.LD.context(%{"foo" =>
%{"@id" => "http://example.com/", "@container" => "@list"}})
c = JSON.LD.context(%{"foo" => %{"@id" => "http://example.com/", "@container" => "@list"}})
assert c.term_defs["foo"]
assert c.term_defs["foo"].iri_mapping == "http://example.com/"
assert c.term_defs["foo"].container_mapping == "@list"
end
test "associates @set container mapping with predicate" do
c = JSON.LD.context(%{"foo" =>
%{"@id" => "http://example.com/", "@container" => "@set"}})
c = JSON.LD.context(%{"foo" => %{"@id" => "http://example.com/", "@container" => "@set"}})
assert c.term_defs["foo"]
assert c.term_defs["foo"].iri_mapping == "http://example.com/"
assert c.term_defs["foo"].container_mapping == "@set"
end
test "associates @id container mapping with predicate" do
c = JSON.LD.context(%{"foo" =>
%{"@id" => "http://example.com/", "@type" => "@id"}})
c = JSON.LD.context(%{"foo" => %{"@id" => "http://example.com/", "@type" => "@id"}})
assert c.term_defs["foo"]
assert c.term_defs["foo"].iri_mapping == "http://example.com/"
assert c.term_defs["foo"].type_mapping == "@id"
end
test "associates type mapping with predicate" do
c = JSON.LD.context(%{"foo" =>
%{"@id" => "http://example.com/", "@type" => to_string(XSD.string)}})
c =
JSON.LD.context(%{
"foo" => %{"@id" => "http://example.com/", "@type" => to_string(XSD.string())}
})
assert c.term_defs["foo"]
assert c.term_defs["foo"].iri_mapping == "http://example.com/"
assert c.term_defs["foo"].type_mapping == to_string(XSD.string)
assert c.term_defs["foo"].type_mapping == to_string(XSD.string())
end
test "associates language mapping with predicate" do
c = JSON.LD.context(%{"foo" =>
%{"@id" => "http://example.com/", "@language" => "en"}})
c = JSON.LD.context(%{"foo" => %{"@id" => "http://example.com/", "@language" => "en"}})
assert c.term_defs["foo"]
assert c.term_defs["foo"].iri_mapping == "http://example.com/"
assert c.term_defs["foo"].language_mapping == "en"
@ -76,7 +75,8 @@ defmodule JSON.LD.ContextTest do
"foo" => "bar",
"bar" => "baz",
"baz" => "http://example.com/"
}) |> iri_mappings == %{
})
|> iri_mappings == %{
"foo" => "http://example.com/",
"bar" => "http://example.com/",
"baz" => "http://example.com/"
@ -84,9 +84,12 @@ defmodule JSON.LD.ContextTest do
end
test "expands terms using @vocab" do
c = JSON.LD.context(%{
c =
JSON.LD.context(%{
"foo" => "bar",
"@vocab" => "http://example.com/"})
"@vocab" => "http://example.com/"
})
assert c.term_defs["foo"]
assert c.term_defs["foo"].iri_mapping == "http://example.com/bar"
end
@ -97,7 +100,8 @@ defmodule JSON.LD.ContextTest do
assert JSON.LD.context([
%{"foo" => "http://example.com/foo"},
%{"bar" => "foo"}
]) |> iri_mappings == %{
])
|> iri_mappings == %{
"foo" => "http://example.com/foo",
"bar" => "http://example.com/foo"
}
@ -107,23 +111,26 @@ defmodule JSON.LD.ContextTest do
describe "term definitions with null values" do
test "removes @language if set to null" do
assert JSON.LD.context([
%{ "@language" => "en" },
%{ "@language" => nil }
%{"@language" => "en"},
%{"@language" => nil}
]).default_language == nil
end
test "removes @vocab if set to null" do
assert JSON.LD.context([
%{ "@vocab" => "http://schema.org/" },
%{ "@vocab" => nil }
%{"@vocab" => "http://schema.org/"},
%{"@vocab" => nil}
]).vocab == nil
end
test "removes term if set to null with @vocab" do
assert JSON.LD.context([%{
assert JSON.LD.context([
%{
"@vocab" => "http://schema.org/",
"term" => nil
}]) |> iri_mappings == %{
}
])
|> iri_mappings == %{
"term" => nil
}
end
@ -133,7 +140,7 @@ defmodule JSON.LD.ContextTest do
end
test "loads initial context" do
init_ec = JSON.LD.Context.new
init_ec = JSON.LD.Context.new()
nil_ec = JSON.LD.context(nil)
assert nil_ec.default_language == init_ec.default_language
assert nil_ec |> coercions == init_ec |> coercions
@ -200,9 +207,9 @@ defmodule JSON.LD.ContextTest do
"@vocab as @id" => %{
input: %{"@vocab" => %{"@id" => "http://example.com/"}},
exception: JSON.LD.InvalidVocabMappingError
},
}
|> Enum.each(fn ({title, data}) ->
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert_raise data.exception, fn ->
@ -211,7 +218,7 @@ defmodule JSON.LD.ContextTest do
end
end)
(JSON.LD.keywords -- ~w[@base @language @vocab])
(JSON.LD.keywords() -- ~w[@base @language @vocab])
|> Enum.each(fn keyword ->
@tag keyword: keyword
test "does not redefine #{keyword} as a string", %{keyword: keyword} do
@ -237,29 +244,27 @@ defmodule JSON.LD.ContextTest do
@tag :skip
test "warn on terms starting with a @"
def iri_mappings(%JSON.LD.Context{term_defs: term_defs}) do
Enum.reduce term_defs, %{}, fn ({term, term_def}, iri_mappings) ->
Map.put iri_mappings, term, (term_def && term_def.iri_mapping) || nil
end
Enum.reduce(term_defs, %{}, fn {term, term_def}, iri_mappings ->
Map.put(iri_mappings, term, (term_def && term_def.iri_mapping) || nil)
end)
end
def languages(%JSON.LD.Context{term_defs: term_defs}) do
Enum.reduce term_defs, %{}, fn ({term, term_def}, language_mappings) ->
Map.put language_mappings, term, term_def.language_mapping
end
Enum.reduce(term_defs, %{}, fn {term, term_def}, language_mappings ->
Map.put(language_mappings, term, term_def.language_mapping)
end)
end
def coercions(%JSON.LD.Context{term_defs: term_defs}) do
Enum.reduce term_defs, %{}, fn ({term, term_def}, type_mappings) ->
Map.put type_mappings, term, term_def.type_mapping
end
Enum.reduce(term_defs, %{}, fn {term, term_def}, type_mappings ->
Map.put(type_mappings, term, term_def.type_mapping)
end)
end
def containers(%JSON.LD.Context{term_defs: term_defs}) do
Enum.reduce term_defs, %{}, fn ({term, term_def}, type_mappings) ->
Map.put type_mappings, term, term_def.container_mapping
Enum.reduce(term_defs, %{}, fn {term, term_def}, type_mappings ->
Map.put(type_mappings, term, term_def.container_mapping)
end)
end
end
end

View File

@ -17,9 +17,8 @@ defmodule JSON.LD.DecoderTest do
alias TestNS.{EX, S}
test "an empty JSON document is deserialized to an empty graph" do
assert JSON.LD.Decoder.decode!("{}") == Dataset.new
assert JSON.LD.Decoder.decode!("{}") == Dataset.new()
end
describe "unnamed nodes" do
@ -43,9 +42,9 @@ defmodule JSON.LD.DecoderTest do
"http://example.com/foo": {"@id": "_:a"}
}),
{RDF.bnode("b0"), ~I<http://example.com/foo>, RDF.bnode("b0")}
},
}
|> Enum.each(fn ({title, data}) ->
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: {input, output}} do
assert JSON.LD.Decoder.decode!(input) == RDF.Dataset.new(output)
@ -61,9 +60,9 @@ defmodule JSON.LD.DecoderTest do
"http://example.com/foo": "bar"
}),
{~I<http://example.com/a>, ~I<http://example.com/foo>, RDF.literal("bar")}
},
}
|> Enum.each(fn ({title, data}) ->
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: {input, output}} do
assert JSON.LD.Decoder.decode!(input) == RDF.Dataset.new(output)
@ -76,24 +75,24 @@ defmodule JSON.LD.DecoderTest do
"@id": "",
"@type": "#{RDF.uri(RDFS.Resource)}"
}),
{~I<http://example.org/>, NS.RDF.type, RDF.uri(RDFS.Resource)}
{~I<http://example.org/>, NS.RDF.type(), RDF.uri(RDFS.Resource)}
},
"relative" => {
~s({
"@id": "a/b",
"@type": "#{RDF.uri(RDFS.Resource)}"
}),
{~I<http://example.org/a/b>, NS.RDF.type, RDF.uri(RDFS.Resource)}
{~I<http://example.org/a/b>, NS.RDF.type(), RDF.uri(RDFS.Resource)}
},
"hash" => {
~s({
"@id": "#a",
"@type": "#{RDF.uri(RDFS.Resource)}"
}),
{~I<http://example.org/#a>, NS.RDF.type, RDF.uri(RDFS.Resource)}
},
{~I<http://example.org/#a>, NS.RDF.type(), RDF.uri(RDFS.Resource)}
}
|> Enum.each(fn ({title, data}) ->
}
|> Enum.each(fn {title, data} ->
@tag data: data
test "when relative IRIs #{title}", %{data: {input, output}} do
assert JSON.LD.Decoder.decode!(input, base: "http://example.org/") ==
@ -108,25 +107,25 @@ defmodule JSON.LD.DecoderTest do
~s({
"@type": "http://example.com/foo"
}),
{RDF.bnode("b0"), NS.RDF.type, ~I<http://example.com/foo>}
{RDF.bnode("b0"), NS.RDF.type(), ~I<http://example.com/foo>}
},
"two types" => {
~s({
"@type": ["http://example.com/foo", "http://example.com/baz"]
}),
[
{RDF.bnode("b0"), NS.RDF.type, ~I<http://example.com/foo>},
{RDF.bnode("b0"), NS.RDF.type, ~I<http://example.com/baz>},
{RDF.bnode("b0"), NS.RDF.type(), ~I<http://example.com/foo>},
{RDF.bnode("b0"), NS.RDF.type(), ~I<http://example.com/baz>}
]
},
"blank node type" => {
~s({
"@type": "_:foo"
}),
{RDF.bnode("b1"), NS.RDF.type, RDF.bnode("b0")}
{RDF.bnode("b1"), NS.RDF.type(), RDF.bnode("b0")}
}
}
|> Enum.each(fn ({title, data}) ->
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: {input, output}} do
assert JSON.LD.Decoder.decode!(input) == RDF.Dataset.new(output)
@ -148,7 +147,7 @@ defmodule JSON.LD.DecoderTest do
}),
[
{RDF.bnode("b0"), ~I<http://example.com/foo>, RDF.literal("bar")},
{RDF.bnode("b0"), ~I<http://example.com/foo>, RDF.literal("baz")},
{RDF.bnode("b0"), ~I<http://example.com/foo>, RDF.literal("baz")}
]
},
"IRI" => {
@ -163,11 +162,11 @@ defmodule JSON.LD.DecoderTest do
}),
[
{RDF.bnode("b0"), ~I<http://example.com/foo>, ~I<http://example.com/bar>},
{RDF.bnode("b0"), ~I<http://example.com/foo>, ~I<http://example.com/baz>},
{RDF.bnode("b0"), ~I<http://example.com/foo>, ~I<http://example.com/baz>}
]
},
}
|> Enum.each(fn ({title, data}) ->
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: {input, output}} do
assert JSON.LD.Decoder.decode!(input) == RDF.Dataset.new(output)
@ -177,23 +176,21 @@ defmodule JSON.LD.DecoderTest do
describe "literals" do
%{
"plain literal" =>
{
"plain literal" => {
~s({"@id": "http://greggkellogg.net/foaf#me", "http://xmlns.com/foaf/0.1/name": "Gregg Kellogg"}),
{~I<http://greggkellogg.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/name>, RDF.literal("Gregg Kellogg")},
{~I<http://greggkellogg.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/name>,
RDF.literal("Gregg Kellogg")}
},
"explicit plain literal" =>
{
"explicit plain literal" => {
~s({"http://xmlns.com/foaf/0.1/name": {"@value": "Gregg Kellogg"}}),
{RDF.bnode("b0"), ~I<http://xmlns.com/foaf/0.1/name>, RDF.literal("Gregg Kellogg")}
},
"language tagged literal" =>
{
"language tagged literal" => {
~s({"http://www.w3.org/2000/01/rdf-schema#label": {"@value": "A plain literal with a lang tag.", "@language": "en-us"}}),
{RDF.bnode("b0"), RDFS.label, RDF.literal("A plain literal with a lang tag.", language: "en-us")}
{RDF.bnode("b0"), RDFS.label(),
RDF.literal("A plain literal with a lang tag.", language: "en-us")}
},
"I18N literal with language" =>
{
"I18N literal with language" => {
~s([{
"@id": "http://greggkellogg.net/foaf#me",
"http://xmlns.com/foaf/0.1/knows": {"@id": "http://www.ivan-herman.net/foaf#me"}
@ -202,20 +199,22 @@ defmodule JSON.LD.DecoderTest do
"http://xmlns.com/foaf/0.1/name": {"@value": "Herman Iván", "@language": "hu"}
}]),
[
{~I<http://greggkellogg.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/knows>, ~I<http://www.ivan-herman.net/foaf#me>},
{~I<http://www.ivan-herman.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/name>, RDF.literal("Herman Iv\u00E1n", language: "hu")},
{~I<http://greggkellogg.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/knows>,
~I<http://www.ivan-herman.net/foaf#me>},
{~I<http://www.ivan-herman.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/name>,
RDF.literal("Herman Iv\u00E1n", language: "hu")}
]
},
"explicit datatyped literal" =>
{
"explicit datatyped literal" => {
~s({
"@id": "http://greggkellogg.net/foaf#me",
"http://purl.org/dc/terms/created": {"@value": "1957-02-27", "@type": "http://www.w3.org/2001/XMLSchema#date"}
}),
{~I<http://greggkellogg.net/foaf#me>, ~I<http://purl.org/dc/terms/created>, RDF.literal("1957-02-27", datatype: XSD.date)},
},
{~I<http://greggkellogg.net/foaf#me>, ~I<http://purl.org/dc/terms/created>,
RDF.literal("1957-02-27", datatype: XSD.date())}
}
|> Enum.each(fn ({title, data}) ->
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: {input, output}} do
assert JSON.LD.Decoder.decode!(input) == RDF.Dataset.new(output)
@ -239,8 +238,8 @@ defmodule JSON.LD.DecoderTest do
{RDF.bnode("b0"), ~I<http://example.com/default#foo>, RDF.literal("bar")}
}
}
|> Enum.each(fn ({title, data}) ->
if title == "empty suffix", do: @tag :skip
|> Enum.each(fn {title, data} ->
if title == "empty suffix", do: @tag(:skip)
@tag data: data
test title, %{data: {input, output}} do
assert JSON.LD.Decoder.decode!(input) == RDF.Dataset.new(output)
@ -258,12 +257,13 @@ defmodule JSON.LD.DecoderTest do
"name": "Gregg Kellogg"
}),
[
{~I<http://example.com/about#gregg>, NS.RDF.type, ~I<http://schema.org/Person>},
{~I<http://example.com/about#gregg>, ~I<http://schema.org/name>, RDF.literal("Gregg Kellogg")},
{~I<http://example.com/about#gregg>, NS.RDF.type(), ~I<http://schema.org/Person>},
{~I<http://example.com/about#gregg>, ~I<http://schema.org/name>,
RDF.literal("Gregg Kellogg")}
]
},
}
|> Enum.each(fn ({title, data}) ->
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: {input, output}} do
assert JSON.LD.Decoder.decode!(input) == RDF.Dataset.new(output)
@ -273,8 +273,7 @@ defmodule JSON.LD.DecoderTest do
describe "chaining" do
%{
"explicit subject" =>
{
"explicit subject" => {
~s({
"@context": {"foaf": "http://xmlns.com/foaf/0.1/"},
"@id": "http://greggkellogg.net/foaf#me",
@ -284,12 +283,13 @@ defmodule JSON.LD.DecoderTest do
}
}),
[
{~I<http://greggkellogg.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/knows>, ~I<http://www.ivan-herman.net/foaf#me>},
{~I<http://www.ivan-herman.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/name>, RDF.literal("Ivan Herman")},
{~I<http://greggkellogg.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/knows>,
~I<http://www.ivan-herman.net/foaf#me>},
{~I<http://www.ivan-herman.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/name>,
RDF.literal("Ivan Herman")}
]
},
"implicit subject" =>
{
"implicit subject" => {
~s({
"@context": {"foaf": "http://xmlns.com/foaf/0.1/"},
"@id": "http://greggkellogg.net/foaf#me",
@ -298,12 +298,13 @@ defmodule JSON.LD.DecoderTest do
}
}),
[
{~I<http://greggkellogg.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/knows>, RDF.bnode("b0")},
{RDF.bnode("b0"), ~I<http://xmlns.com/foaf/0.1/name>, RDF.literal("Manu Sporny")},
{~I<http://greggkellogg.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/knows>,
RDF.bnode("b0")},
{RDF.bnode("b0"), ~I<http://xmlns.com/foaf/0.1/name>, RDF.literal("Manu Sporny")}
]
},
}
|> Enum.each(fn ({title, data}) ->
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: {input, output}} do
assert JSON.LD.Decoder.decode!(input) == RDF.Dataset.new(output)
@ -313,20 +314,21 @@ defmodule JSON.LD.DecoderTest do
describe "multiple values" do
%{
"literals" =>
{
"literals" => {
~s({
"@context": {"foaf": "http://xmlns.com/foaf/0.1/"},
"@id": "http://greggkellogg.net/foaf#me",
"foaf:knows": ["Manu Sporny", "Ivan Herman"]
}),
[
{~I<http://greggkellogg.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/knows>, RDF.literal("Manu Sporny")},
{~I<http://greggkellogg.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/knows>, RDF.literal("Ivan Herman")},
{~I<http://greggkellogg.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/knows>,
RDF.literal("Manu Sporny")},
{~I<http://greggkellogg.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/knows>,
RDF.literal("Ivan Herman")}
]
},
}
|> Enum.each(fn ({title, data}) ->
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: {input, output}} do
assert JSON.LD.Decoder.decode!(input) == RDF.Dataset.new(output)
@ -342,7 +344,7 @@ defmodule JSON.LD.DecoderTest do
"@id": "http://greggkellogg.net/foaf#me",
"foaf:knows": {"@list": []}
}),
{~I<http://greggkellogg.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/knows>, NS.RDF.nil}
{~I<http://greggkellogg.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/knows>, NS.RDF.nil()}
},
"single value" => {
~s({
@ -351,9 +353,10 @@ defmodule JSON.LD.DecoderTest do
"foaf:knows": {"@list": ["Manu Sporny"]}
}),
[
{~I<http://greggkellogg.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/knows>, RDF.bnode("b0")},
{RDF.bnode("b0"), NS.RDF.first, RDF.literal("Manu Sporny")},
{RDF.bnode("b0"), NS.RDF.rest, NS.RDF.nil},
{~I<http://greggkellogg.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/knows>,
RDF.bnode("b0")},
{RDF.bnode("b0"), NS.RDF.first(), RDF.literal("Manu Sporny")},
{RDF.bnode("b0"), NS.RDF.rest(), NS.RDF.nil()}
]
},
"single value (with coercion)" => {
@ -366,9 +369,10 @@ defmodule JSON.LD.DecoderTest do
"foaf:knows": ["Manu Sporny"]
}),
[
{~I<http://greggkellogg.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/knows>, RDF.bnode("b0")},
{RDF.bnode("b0"), NS.RDF.first, RDF.literal("Manu Sporny")},
{RDF.bnode("b0"), NS.RDF.rest, NS.RDF.nil},
{~I<http://greggkellogg.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/knows>,
RDF.bnode("b0")},
{RDF.bnode("b0"), NS.RDF.first(), RDF.literal("Manu Sporny")},
{RDF.bnode("b0"), NS.RDF.rest(), NS.RDF.nil()}
]
},
"multiple values" => {
@ -378,15 +382,16 @@ defmodule JSON.LD.DecoderTest do
"foaf:knows": {"@list": ["Manu Sporny", "Dave Longley"]}
}),
[
{~I<http://greggkellogg.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/knows>, RDF.bnode("b0")},
{RDF.bnode("b0"), NS.RDF.first, RDF.literal("Manu Sporny")},
{RDF.bnode("b0"), NS.RDF.rest, RDF.bnode("b1")},
{RDF.bnode("b1"), NS.RDF.first, RDF.literal("Dave Longley")},
{RDF.bnode("b1"), NS.RDF.rest, NS.RDF.nil},
{~I<http://greggkellogg.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/knows>,
RDF.bnode("b0")},
{RDF.bnode("b0"), NS.RDF.first(), RDF.literal("Manu Sporny")},
{RDF.bnode("b0"), NS.RDF.rest(), RDF.bnode("b1")},
{RDF.bnode("b1"), NS.RDF.first(), RDF.literal("Dave Longley")},
{RDF.bnode("b1"), NS.RDF.rest(), NS.RDF.nil()}
]
},
}
|> Enum.each(fn ({title, data}) ->
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: {input, output}} do
assert JSON.LD.Decoder.decode!(input) == RDF.Dataset.new(output)
@ -396,8 +401,7 @@ defmodule JSON.LD.DecoderTest do
describe "context" do
%{
"@id coersion" =>
{
"@id coersion" => {
~s({
"@context": {
"knows": {"@id": "http://xmlns.com/foaf/0.1/knows", "@type": "@id"}
@ -405,10 +409,10 @@ defmodule JSON.LD.DecoderTest do
"@id": "http://greggkellogg.net/foaf#me",
"knows": "http://www.ivan-herman.net/foaf#me"
}),
{~I<http://greggkellogg.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/knows>, ~I<http://www.ivan-herman.net/foaf#me>},
{~I<http://greggkellogg.net/foaf#me>, ~I<http://xmlns.com/foaf/0.1/knows>,
~I<http://www.ivan-herman.net/foaf#me>}
},
"datatype coersion" =>
{
"datatype coersion" => {
~s({
"@context": {
"dcterms": "http://purl.org/dc/terms/",
@ -418,7 +422,8 @@ defmodule JSON.LD.DecoderTest do
"@id": "http://greggkellogg.net/foaf#me",
"created": "1957-02-27"
}),
{~I<http://greggkellogg.net/foaf#me>, ~I<http://purl.org/dc/terms/created>, RDF.literal("1957-02-27", datatype: XSD.date)},
{~I<http://greggkellogg.net/foaf#me>, ~I<http://purl.org/dc/terms/created>,
RDF.literal("1957-02-27", datatype: XSD.date())}
},
"sub-objects with context" => {
~s({
@ -430,7 +435,7 @@ defmodule JSON.LD.DecoderTest do
}),
[
{RDF.bnode("b0"), ~I<http://example.com/foo>, RDF.bnode("b1")},
{RDF.bnode("b1"), ~I<http://example.org/foo>, RDF.literal("bar")},
{RDF.bnode("b1"), ~I<http://example.org/foo>, RDF.literal("bar")}
]
},
"contexts with a list processed in order" => {
@ -441,7 +446,7 @@ defmodule JSON.LD.DecoderTest do
],
"foo": "bar"
}),
{RDF.bnode("b0"), ~I<http://example.org/foo>, RDF.literal("bar")},
{RDF.bnode("b0"), ~I<http://example.org/foo>, RDF.literal("bar")}
},
"term definition resolves term as IRI" => {
~s({
@ -451,7 +456,7 @@ defmodule JSON.LD.DecoderTest do
],
"bar": "bar"
}),
{RDF.bnode("b0"), ~I<http://example.com/foo>, RDF.literal("bar")},
{RDF.bnode("b0"), ~I<http://example.com/foo>, RDF.literal("bar")}
},
"term definition resolves prefix as IRI" => {
~s({
@ -461,7 +466,7 @@ defmodule JSON.LD.DecoderTest do
],
"bar": "bar"
}),
{RDF.bnode("b0"), ~I<http://example.com/foo#bar>, RDF.literal("bar")},
{RDF.bnode("b0"), ~I<http://example.com/foo#bar>, RDF.literal("bar")}
},
"@language" => {
~s({
@ -471,7 +476,7 @@ defmodule JSON.LD.DecoderTest do
},
"foo:bar": "baz"
}),
{RDF.bnode("b0"), ~I<http://example.com/foo#bar>, RDF.literal("baz", language: "en")},
{RDF.bnode("b0"), ~I<http://example.com/foo#bar>, RDF.literal("baz", language: "en")}
},
"@language with override" => {
~s({
@ -481,7 +486,7 @@ defmodule JSON.LD.DecoderTest do
},
"foo:bar": {"@value": "baz", "@language": "fr"}
}),
{RDF.bnode("b0"), ~I<http://example.com/foo#bar>, RDF.literal("baz", language: "fr")},
{RDF.bnode("b0"), ~I<http://example.com/foo#bar>, RDF.literal("baz", language: "fr")}
},
"@language with plain" => {
~s({
@ -491,10 +496,10 @@ defmodule JSON.LD.DecoderTest do
},
"foo:bar": {"@value": "baz"}
}),
{RDF.bnode("b0"), ~I<http://example.com/foo#bar>, RDF.literal("baz")},
},
{RDF.bnode("b0"), ~I<http://example.com/foo#bar>, RDF.literal("baz")}
}
|> Enum.each(fn ({title, data}) ->
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: {input, output}} do
assert JSON.LD.Decoder.decode!(input) == RDF.Dataset.new(output)
@ -510,7 +515,7 @@ defmodule JSON.LD.DecoderTest do
],
"foo": "bar"
}),
{RDF.bnode("b0"), ~I<http://example.org/foo#>, RDF.literal("bar", datatype: XSD.date)},
{RDF.bnode("b0"), ~I<http://example.org/foo#>, RDF.literal("bar", datatype: XSD.date())}
},
"@id with term" => {
~s({
@ -519,7 +524,7 @@ defmodule JSON.LD.DecoderTest do
],
"foo": "http://example.org/foo#bar"
}),
{RDF.bnode("b0"), ~I<http://example.org/foo#bar>, ~I<http://example.org/foo#bar>},
{RDF.bnode("b0"), ~I<http://example.org/foo#bar>, ~I<http://example.org/foo#bar>}
},
"coercion without term definition" => {
~s({
@ -534,10 +539,11 @@ defmodule JSON.LD.DecoderTest do
],
"dc:date": "2011-11-23"
}),
{RDF.bnode("b0"), ~I<http://purl.org/dc/terms/date>, RDF.literal("2011-11-23", datatype: XSD.date)},
},
{RDF.bnode("b0"), ~I<http://purl.org/dc/terms/date>,
RDF.literal("2011-11-23", datatype: XSD.date())}
}
|> Enum.each(fn ({title, data}) ->
}
|> Enum.each(fn {title, data} ->
@tag data: data
test "term def with @id + @type coercion: #{title}", %{data: {input, output}} do
assert JSON.LD.Decoder.decode!(input) == RDF.Dataset.new(output)
@ -555,8 +561,8 @@ defmodule JSON.LD.DecoderTest do
}),
[
{RDF.bnode("b0"), ~I<http://example.org/foo#>, RDF.bnode("b1")},
{RDF.bnode("b1"), NS.RDF.first, RDF.literal("bar", datatype: XSD.date)},
{RDF.bnode("b1"), NS.RDF.rest, NS.RDF.nil},
{RDF.bnode("b1"), NS.RDF.first(), RDF.literal("bar", datatype: XSD.date())},
{RDF.bnode("b1"), NS.RDF.rest(), NS.RDF.nil()}
]
},
"@id with term" => {
@ -568,12 +574,12 @@ defmodule JSON.LD.DecoderTest do
}),
[
{RDF.bnode("b0"), ~I<http://example.org/foo#bar>, RDF.bnode("b1")},
{RDF.bnode("b1"), NS.RDF.first, ~I<http://example.org/foo#bar>},
{RDF.bnode("b1"), NS.RDF.rest, NS.RDF.nil},
{RDF.bnode("b1"), NS.RDF.first(), ~I<http://example.org/foo#bar>},
{RDF.bnode("b1"), NS.RDF.rest(), NS.RDF.nil()}
]
},
}
|> Enum.each(fn ({title, data}) ->
}
|> Enum.each(fn {title, data} ->
@tag data: data
test "term def with @id + @type + @container list: #{title}", %{data: {input, output}} do
assert JSON.LD.Decoder.decode!(input) == RDF.Dataset.new(output)
@ -601,39 +607,37 @@ defmodule JSON.LD.DecoderTest do
describe "advanced features" do
%{
"number syntax (decimal)" =>
{
"number syntax (decimal)" => {
~s({"@context": { "measure": "http://example/measure#"}, "measure:cups": 5.3}),
{RDF.bnode("b0"), ~I<http://example/measure#cups>, RDF.literal("5.3E0", datatype: XSD.double)}
{RDF.bnode("b0"), ~I<http://example/measure#cups>,
RDF.literal("5.3E0", datatype: XSD.double())}
},
"number syntax (double)" =>
{
"number syntax (double)" => {
~s({"@context": { "measure": "http://example/measure#"}, "measure:cups": 5.3e0}),
{RDF.bnode("b0"), ~I<http://example/measure#cups>, RDF.literal("5.3E0", datatype: XSD.double)}
{RDF.bnode("b0"), ~I<http://example/measure#cups>,
RDF.literal("5.3E0", datatype: XSD.double())}
},
"number syntax (integer)" =>
{
"number syntax (integer)" => {
~s({"@context": { "chem": "http://example/chem#"}, "chem:protons": 12}),
{RDF.bnode("b0"), ~I<http://example/chem#protons>, RDF.literal("12", datatype: XSD.integer)}
{RDF.bnode("b0"), ~I<http://example/chem#protons>,
RDF.literal("12", datatype: XSD.integer())}
},
"boolan syntax" =>
{
"boolan syntax" => {
~s({"@context": { "sensor": "http://example/sensor#"}, "sensor:active": true}),
{RDF.bnode("b0"), ~I<http://example/sensor#active>, RDF.literal("true", datatype: XSD.boolean)}
{RDF.bnode("b0"), ~I<http://example/sensor#active>,
RDF.literal("true", datatype: XSD.boolean())}
},
"Array top element" =>
{
"Array top element" => {
~s([
{"@id": "http://example.com/#me", "@type": "http://xmlns.com/foaf/0.1/Person"},
{"@id": "http://example.com/#you", "@type": "http://xmlns.com/foaf/0.1/Person"}
]),
[
{~I<http://example.com/#me>, NS.RDF.type, ~I<http://xmlns.com/foaf/0.1/Person>},
{~I<http://example.com/#you>, NS.RDF.type, ~I<http://xmlns.com/foaf/0.1/Person>}
{~I<http://example.com/#me>, NS.RDF.type(), ~I<http://xmlns.com/foaf/0.1/Person>},
{~I<http://example.com/#you>, NS.RDF.type(), ~I<http://xmlns.com/foaf/0.1/Person>}
]
},
"@graph with array of objects value" =>
{
"@graph with array of objects value" => {
~s({
"@context": {"foaf": "http://xmlns.com/foaf/0.1/"},
"@graph": [
@ -642,27 +646,26 @@ defmodule JSON.LD.DecoderTest do
]
}),
[
{~I<http://example.com/#me>, NS.RDF.type, ~I<http://xmlns.com/foaf/0.1/Person>},
{~I<http://example.com/#you>, NS.RDF.type, ~I<http://xmlns.com/foaf/0.1/Person>}
{~I<http://example.com/#me>, NS.RDF.type(), ~I<http://xmlns.com/foaf/0.1/Person>},
{~I<http://example.com/#you>, NS.RDF.type(), ~I<http://xmlns.com/foaf/0.1/Person>}
]
},
"XMLLiteral" =>
{
"XMLLiteral" => {
~s({
"http://rdfs.org/sioc/ns#content": {
"@value": "foo",
"@type": "http://www.w3.org/1999/02/22-rdf-syntax-ns#XMLLiteral"
}
}),
{RDF.bnode("b0"), ~I<http://rdfs.org/sioc/ns#content>, RDF.literal("foo", datatype: "http://www.w3.org/1999/02/22-rdf-syntax-ns#XMLLiteral")}
{RDF.bnode("b0"), ~I<http://rdfs.org/sioc/ns#content>,
RDF.literal("foo", datatype: "http://www.w3.org/1999/02/22-rdf-syntax-ns#XMLLiteral")}
}
}
|> Enum.each(fn ({title, data}) ->
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: {input, output}} do
assert JSON.LD.Decoder.decode!(input) == RDF.Dataset.new(output)
end
end)
end
end

View File

@ -19,16 +19,16 @@ defmodule JSON.LD.EncoderTest do
@compile {:no_warn_undefined, JSON.LD.EncoderTest.TestNS.EX}
@compile {:no_warn_undefined, JSON.LD.EncoderTest.TestNS.S}
def gets_serialized_to(input, output, opts \\ []) do
data_structs = Keyword.get(opts, :data_structs, [Dataset, Graph])
Enum.each data_structs, fn data_struct ->
Enum.each(data_structs, fn data_struct ->
assert JSON.LD.Encoder.from_rdf!(data_struct.new(input), opts) == output
end
end)
end
test "pretty printing" do
dataset = Dataset.new {~I<http://a/b>, ~I<http://a/c>, ~I<http://a/d>}
dataset = Dataset.new({~I<http://a/b>, ~I<http://a/c>, ~I<http://a/d>})
assert JSON.LD.Encoder.encode!(dataset) ==
"[{\"@id\":\"http://a/b\",\"http://a/c\":[{\"@id\":\"http://a/d\"}]}]"
@ -45,119 +45,193 @@ defmodule JSON.LD.EncoderTest do
]
}
]
""" |> String.trim()
"""
|> String.trim()
end
test "an empty RDF.Dataset is serialized to an JSON array string" do
assert JSON.LD.Encoder.encode!(Dataset.new) == "[]"
assert JSON.LD.Encoder.encode!(Dataset.new()) == "[]"
end
describe "simple tests" do
test "One subject IRI object" do
{~I<http://a/b>, ~I<http://a/c>, ~I<http://a/d>}
|> gets_serialized_to([%{
|> gets_serialized_to(
[
%{
"@id" => "http://a/b",
"http://a/c" => [%{"@id" => "http://a/d"}]
}], data_structs: [Dataset, Graph, Description])
}
],
data_structs: [Dataset, Graph, Description]
)
end
test "should generate object list" do
[{EX.b, EX.c, EX.d}, {EX.b, EX.c, EX.e}]
|> gets_serialized_to([%{
[{EX.b(), EX.c(), EX.d()}, {EX.b(), EX.c(), EX.e()}]
|> gets_serialized_to(
[
%{
"@id" => "http://example.com/b",
"http://example.com/c" => [
%{"@id" => "http://example.com/d"},
%{"@id" => "http://example.com/e"}
]
}], data_structs: [Dataset, Graph, Description])
}
],
data_structs: [Dataset, Graph, Description]
)
end
test "should generate property list" do
[{EX.b, EX.c, EX.d}, {EX.b, EX.e, EX.f}]
|> gets_serialized_to([%{
[{EX.b(), EX.c(), EX.d()}, {EX.b(), EX.e(), EX.f()}]
|> gets_serialized_to(
[
%{
"@id" => "http://example.com/b",
"http://example.com/c" => [%{"@id" => "http://example.com/d"}],
"http://example.com/e" => [%{"@id" => "http://example.com/f"}]
}], data_structs: [Dataset, Graph, Description])
}
],
data_structs: [Dataset, Graph, Description]
)
end
test "serializes multiple subjects" do
[
{~I<http://test-cases/0001>, NS.RDF.type, ~I<http://www.w3.org/2006/03/test-description#TestCase>},
{~I<http://test-cases/0002>, NS.RDF.type, ~I<http://www.w3.org/2006/03/test-description#TestCase>}
{~I<http://test-cases/0001>, NS.RDF.type(),
~I<http://www.w3.org/2006/03/test-description#TestCase>},
{~I<http://test-cases/0002>, NS.RDF.type(),
~I<http://www.w3.org/2006/03/test-description#TestCase>}
]
|> gets_serialized_to([
%{"@id" => "http://test-cases/0001", "@type" => ["http://www.w3.org/2006/03/test-description#TestCase"]},
%{"@id" => "http://test-cases/0002", "@type" => ["http://www.w3.org/2006/03/test-description#TestCase"]},
%{
"@id" => "http://test-cases/0001",
"@type" => ["http://www.w3.org/2006/03/test-description#TestCase"]
},
%{
"@id" => "http://test-cases/0002",
"@type" => ["http://www.w3.org/2006/03/test-description#TestCase"]
}
])
end
end
describe "literal coercion" do
test "typed literal" do
{EX.a, EX.b, RDF.literal("foo", datatype: EX.d)}
|> gets_serialized_to([%{
{EX.a(), EX.b(), RDF.literal("foo", datatype: EX.d())}
|> gets_serialized_to(
[
%{
"@id" => "http://example.com/a",
"http://example.com/b" => [%{"@value" => "foo", "@type" => "http://example.com/d"}]
}], data_structs: [Dataset, Graph, Description])
}
],
data_structs: [Dataset, Graph, Description]
)
end
test "integer" do
{EX.a, EX.b, RDF.literal(1)}
|> gets_serialized_to([%{
{EX.a(), EX.b(), RDF.literal(1)}
|> gets_serialized_to(
[
%{
"@id" => "http://example.com/a",
"http://example.com/b" => [%{"@value" => 1}]
}], use_native_types: true)
}
],
use_native_types: true
)
end
test "integer (non-native)" do
{EX.a, EX.b, RDF.literal(1)}
|> gets_serialized_to([%{
{EX.a(), EX.b(), RDF.literal(1)}
|> gets_serialized_to(
[
%{
"@id" => "http://example.com/a",
"http://example.com/b" => [%{"@value" => "1","@type" => "http://www.w3.org/2001/XMLSchema#integer"}]
}], use_native_types: false)
"http://example.com/b" => [
%{"@value" => "1", "@type" => "http://www.w3.org/2001/XMLSchema#integer"}
]
}
],
use_native_types: false
)
end
test "boolean" do
{EX.a, EX.b, RDF.literal(true)}
|> gets_serialized_to([%{
{EX.a(), EX.b(), RDF.literal(true)}
|> gets_serialized_to(
[
%{
"@id" => "http://example.com/a",
"http://example.com/b" => [%{"@value" => true}]
}], use_native_types: true)
}
],
use_native_types: true
)
end
test "boolean (non-native)" do
{EX.a, EX.b, RDF.literal(true)}
|> gets_serialized_to([%{
{EX.a(), EX.b(), RDF.literal(true)}
|> gets_serialized_to(
[
%{
"@id" => "http://example.com/a",
"http://example.com/b" => [%{"@value" => "true","@type" => "http://www.w3.org/2001/XMLSchema#boolean"}]
}], use_native_types: false)
"http://example.com/b" => [
%{"@value" => "true", "@type" => "http://www.w3.org/2001/XMLSchema#boolean"}
]
}
],
use_native_types: false
)
end
@tag skip: "TODO: Is this spec conformant or RDF.rb specific? RDF.rb doesn't use the specified RDF to Object Conversion algorithm but reuses a generalized expand_value algorithm"
@tag skip:
"TODO: Is this spec conformant or RDF.rb specific? RDF.rb doesn't use the specified RDF to Object Conversion algorithm but reuses a generalized expand_value algorithm"
test "decimal" do
{EX.a, EX.b, RDF.literal(1.0)}
|> gets_serialized_to([%{
{EX.a(), EX.b(), RDF.literal(1.0)}
|> gets_serialized_to(
[
%{
"@id" => "http://example.com/a",
"http://example.com/b" => [%{"@value" => "1.0", "@type" => "http://www.w3.org/2001/XMLSchema#decimal"}]
}], use_native_types: true)
"http://example.com/b" => [
%{"@value" => "1.0", "@type" => "http://www.w3.org/2001/XMLSchema#decimal"}
]
}
],
use_native_types: true
)
end
test "double" do
{EX.a, EX.b, RDF.literal(1.0e0)}
|> gets_serialized_to([%{
{EX.a(), EX.b(), RDF.literal(1.0e0)}
|> gets_serialized_to(
[
%{
"@id" => "http://example.com/a",
"http://example.com/b" => [%{"@value" => 1.0E0}]
}], use_native_types: true)
"http://example.com/b" => [%{"@value" => 1.0e0}]
}
],
use_native_types: true
)
end
@tag skip: "TODO: Is this spec conformant or RDF.rb specific? RDF.rb doesn't use the specified RDF to Object Conversion algorithm but reuses a generalized expand_value algorithm"
@tag skip:
"TODO: Is this spec conformant or RDF.rb specific? RDF.rb doesn't use the specified RDF to Object Conversion algorithm but reuses a generalized expand_value algorithm"
test "double (non-native)" do
{EX.a, EX.b, RDF.literal(1.0e0)}
|> gets_serialized_to([%{
{EX.a(), EX.b(), RDF.literal(1.0e0)}
|> gets_serialized_to(
[
%{
"@id" => "http://example.com/a",
"http://example.com/b" => [%{"@value" => "1.0E0", "@type" => "http://www.w3.org/2001/XMLSchema#double"}]
}], use_native_types: false)
"http://example.com/b" => [
%{"@value" => "1.0E0", "@type" => "http://www.w3.org/2001/XMLSchema#double"}
]
}
],
use_native_types: false
)
end
end
@ -168,50 +242,70 @@ defmodule JSON.LD.EncoderTest do
nonNegativeInteger: 1,
float: 1.0,
nonPositiveInteger: -1,
negativeInteger: -1,
negativeInteger: -1
}
|> Enum.each(fn ({type, _} = data) ->
|> Enum.each(fn {type, _} = data ->
@tag data: data
test "#{type}", %{data: {type, value}} do
{EX.a, EX.b, RDF.literal(value, datatype: apply(NS.XSD, type, []))}
|> gets_serialized_to([%{
{EX.a(), EX.b(), RDF.literal(value, datatype: apply(NS.XSD, type, []))}
|> gets_serialized_to(
[
%{
"@id" => "http://example.com/a",
"http://example.com/b" => [%{"@value" => "#{value}", "@type" => "http://www.w3.org/2001/XMLSchema##{type}"}]
}], use_native_types: false)
"http://example.com/b" => [
%{"@value" => "#{value}", "@type" => "http://www.w3.org/2001/XMLSchema##{type}"}
]
}
],
use_native_types: false
)
end
end)
test "when useNativeTypes" do
{EX.a, EX.b, RDF.literal("foo", datatype: EX.customType)}
|> gets_serialized_to([%{
{EX.a(), EX.b(), RDF.literal("foo", datatype: EX.customType())}
|> gets_serialized_to(
[
%{
"@id" => "http://example.com/a",
"http://example.com/b" => [%{"@value" => "foo", "@type" => to_string(EX.customType)}]
}], use_native_types: true)
"http://example.com/b" => [
%{"@value" => "foo", "@type" => to_string(EX.customType())}
]
}
],
use_native_types: true
)
end
end
test "encodes language literal" do
{EX.a, EX.b, RDF.literal("foo", language: "en-us")}
|> gets_serialized_to([%{
{EX.a(), EX.b(), RDF.literal("foo", language: "en-us")}
|> gets_serialized_to([
%{
"@id" => "http://example.com/a",
"http://example.com/b" => [%{"@value" => "foo", "@language" => "en-us"}]
}])
}
])
end
describe "blank nodes" do
test "should generate blank nodes" do
{RDF.bnode(:a), EX.a, EX.b}
|> gets_serialized_to([%{
{RDF.bnode(:a), EX.a(), EX.b()}
|> gets_serialized_to(
[
%{
"@id" => "_:a",
"http://example.com/a" => [%{"@id" => "http://example.com/b"}]
}], data_structs: [Dataset, Graph, Description])
}
],
data_structs: [Dataset, Graph, Description]
)
end
test "should generate blank nodes as object" do
[
{EX.a, EX.b, RDF.bnode(:a)},
{RDF.bnode(:a), EX.c, EX.d}
{EX.a(), EX.b(), RDF.bnode(:a)},
{RDF.bnode(:a), EX.c(), EX.d()}
]
|> gets_serialized_to([
%{
@ -230,63 +324,75 @@ defmodule JSON.LD.EncoderTest do
%{
"literal list" => {
[
{EX.a, EX.b, RDF.bnode(:e1) },
{RDF.bnode(:e1), NS.RDF.first, ~L"apple"},
{RDF.bnode(:e1), NS.RDF.rest, RDF.bnode(:e2)},
{RDF.bnode(:e2), NS.RDF.first, ~L"banana"},
{RDF.bnode(:e2), NS.RDF.rest, NS.RDF.nil},
{EX.a(), EX.b(), RDF.bnode(:e1)},
{RDF.bnode(:e1), NS.RDF.first(), ~L"apple"},
{RDF.bnode(:e1), NS.RDF.rest(), RDF.bnode(:e2)},
{RDF.bnode(:e2), NS.RDF.first(), ~L"banana"},
{RDF.bnode(:e2), NS.RDF.rest(), NS.RDF.nil()}
],
[%{
[
%{
"@id" => "http://example.com/a",
"http://example.com/b" => [%{
"http://example.com/b" => [
%{
"@list" => [
%{"@value" => "apple"},
%{"@value" => "banana"}
]
}]
}]
}
]
}
]
},
"iri list" => {
[
{EX.a, EX.b, RDF.bnode(:list)},
{RDF.bnode(:list), NS.RDF.first, EX.c},
{RDF.bnode(:list), NS.RDF.rest, NS.RDF.nil},
{EX.a(), EX.b(), RDF.bnode(:list)},
{RDF.bnode(:list), NS.RDF.first(), EX.c()},
{RDF.bnode(:list), NS.RDF.rest(), NS.RDF.nil()}
],
[%{
[
%{
"@id" => "http://example.com/a",
"http://example.com/b" => [%{
"http://example.com/b" => [
%{
"@list" => [
%{"@id" => "http://example.com/c"}
]
}]
}]
}
]
}
]
},
"empty list" => {
[
{EX.a, EX.b, NS.RDF.nil},
{EX.a(), EX.b(), NS.RDF.nil()}
],
[%{
[
%{
"@id" => "http://example.com/a",
"http://example.com/b" => [%{"@list" => []}]
}]
}
]
},
"single element list" => {
[
{EX.a, EX.b, RDF.bnode(:list)},
{RDF.bnode(:list), NS.RDF.first, ~L"apple"},
{RDF.bnode(:list), NS.RDF.rest, NS.RDF.nil},
{EX.a(), EX.b(), RDF.bnode(:list)},
{RDF.bnode(:list), NS.RDF.first(), ~L"apple"},
{RDF.bnode(:list), NS.RDF.rest(), NS.RDF.nil()}
],
[%{
[
%{
"@id" => "http://example.com/a",
"http://example.com/b" => [%{"@list" => [%{"@value" => "apple"}]}]
}]
}
]
},
"single element list without @type" => {
[
{EX.a, EX.b, RDF.bnode(:list)},
{RDF.bnode(:list), NS.RDF.first, RDF.bnode(:a)},
{RDF.bnode(:list), NS.RDF.rest, NS.RDF.nil},
{RDF.bnode(:a), EX.b, ~L"foo"},
{EX.a(), EX.b(), RDF.bnode(:list)},
{RDF.bnode(:list), NS.RDF.first(), RDF.bnode(:a)},
{RDF.bnode(:list), NS.RDF.rest(), NS.RDF.nil()},
{RDF.bnode(:a), EX.b(), ~L"foo"}
],
[
%{
@ -296,46 +402,55 @@ defmodule JSON.LD.EncoderTest do
%{
"@id" => "http://example.com/a",
"http://example.com/b" => [%{"@list" => [%{"@id" => "_:a"}]}]
},
}
]
},
"multiple graphs with shared BNode" => {
[
{EX.z, EX.q, RDF.bnode(:z0), EX.G},
{RDF.bnode(:z0), NS.RDF.first, ~L"cell-A", EX.G},
{RDF.bnode(:z0), NS.RDF.rest, RDF.bnode(:z1), EX.G},
{RDF.bnode(:z1), NS.RDF.first, ~L"cell-B", EX.G},
{RDF.bnode(:z1), NS.RDF.rest, NS.RDF.nil, EX.G},
{EX.x, EX.p, RDF.bnode(:z1), EX.G1},
{EX.z(), EX.q(), RDF.bnode(:z0), EX.G},
{RDF.bnode(:z0), NS.RDF.first(), ~L"cell-A", EX.G},
{RDF.bnode(:z0), NS.RDF.rest(), RDF.bnode(:z1), EX.G},
{RDF.bnode(:z1), NS.RDF.first(), ~L"cell-B", EX.G},
{RDF.bnode(:z1), NS.RDF.rest(), NS.RDF.nil(), EX.G},
{EX.x(), EX.p(), RDF.bnode(:z1), EX.G1}
],
[%{
[
%{
"@id" => "http://www.example.com/G",
"@graph" => [%{
"@graph" => [
%{
"@id" => "_:z0",
"http://www.w3.org/1999/02/22-rdf-syntax-ns#first" => [%{"@value" => "cell-A"}],
"http://www.w3.org/1999/02/22-rdf-syntax-ns#rest" => [%{"@id" => "_:z1"}]
}, %{
},
%{
"@id" => "_:z1",
"http://www.w3.org/1999/02/22-rdf-syntax-ns#first" => [%{"@value" => "cell-B"}],
"http://www.w3.org/1999/02/22-rdf-syntax-ns#rest" => [%{"@list" => []}]
}, %{
},
%{
"@id" => "http://www.example.com/z",
"http://www.example.com/q" => [%{"@id" => "_:z0"}]
}]
}
]
},
%{
"@id" => "http://www.example.com/G1",
"@graph" => [%{
"@graph" => [
%{
"@id" => "http://www.example.com/x",
"http://www.example.com/p" => [%{"@id" => "_:z1"}]
}]
}]
},
}
|> Enum.each(fn ({title, data}) ->
]
}
]
}
}
|> Enum.each(fn {title, data} ->
if title == "multiple graphs with shared BNode" do
@tag skip: "TODO: https://github.com/json-ld/json-ld.org/issues/357"
end
@tag data: data
test title, %{data: {input, output}} do
input |> gets_serialized_to(output)
@ -346,61 +461,23 @@ defmodule JSON.LD.EncoderTest do
describe "quads" do
%{
"simple named graph" => %{
input: {EX.a, EX.b, EX.c, EX.U},
input: {EX.a(), EX.b(), EX.c(), EX.U},
output: [
%{
"@id" => "http://example.com/U",
"@graph" => [%{
"@graph" => [
%{
"@id" => "http://example.com/a",
"http://example.com/b" => [%{"@id" => "http://example.com/c"}]
}]
},
}
]
}
]
},
"with properties" => %{
input: [
{EX.a, EX.b, EX.c, EX.U},
{EX.U, EX.d, EX.e},
],
output: [
%{
"@id" => "http://example.com/U",
"@graph" => [%{
"@id" => "http://example.com/a",
"http://example.com/b" => [%{"@id" => "http://example.com/c"}]
}],
"http://example.com/d" => [%{"@id" => "http://example.com/e"}]
}
]
},
"with lists" => %{
input: [
{EX.a, EX.b, RDF.bnode(:a), EX.U},
{RDF.bnode(:a), NS.RDF.first, EX.c, EX.U},
{RDF.bnode(:a), NS.RDF.rest, NS.RDF.nil, EX.U},
{EX.U, EX.d, RDF.bnode(:b)},
{RDF.bnode(:b), NS.RDF.first, EX.e},
{RDF.bnode(:b), NS.RDF.rest, NS.RDF.nil},
],
output: [
%{
"@id" => "http://example.com/U",
"@graph" => [%{
"@id" => "http://example.com/a",
"http://example.com/b" => [%{"@list" => [%{"@id" => "http://example.com/c"}]}]
}],
"http://example.com/d" => [%{"@list" => [%{"@id" => "http://example.com/e"}]}]
}
]
},
"Two Graphs with same subject and lists" => %{
input: [
{EX.a, EX.b, RDF.bnode(:a), EX.U},
{RDF.bnode(:a), NS.RDF.first, EX.c, EX.U},
{RDF.bnode(:a), NS.RDF.rest, NS.RDF.nil, EX.U},
{EX.a, EX.b, RDF.bnode(:b), EX.V},
{RDF.bnode(:b), NS.RDF.first, EX.e, EX.V},
{RDF.bnode(:b), NS.RDF.rest, NS.RDF.nil, EX.V},
{EX.a(), EX.b(), EX.c(), EX.U},
{EX.U, EX.d(), EX.e()}
],
output: [
%{
@ -408,9 +485,55 @@ defmodule JSON.LD.EncoderTest do
"@graph" => [
%{
"@id" => "http://example.com/a",
"http://example.com/b" => [%{
"http://example.com/b" => [%{"@id" => "http://example.com/c"}]
}
],
"http://example.com/d" => [%{"@id" => "http://example.com/e"}]
}
]
},
"with lists" => %{
input: [
{EX.a(), EX.b(), RDF.bnode(:a), EX.U},
{RDF.bnode(:a), NS.RDF.first(), EX.c(), EX.U},
{RDF.bnode(:a), NS.RDF.rest(), NS.RDF.nil(), EX.U},
{EX.U, EX.d(), RDF.bnode(:b)},
{RDF.bnode(:b), NS.RDF.first(), EX.e()},
{RDF.bnode(:b), NS.RDF.rest(), NS.RDF.nil()}
],
output: [
%{
"@id" => "http://example.com/U",
"@graph" => [
%{
"@id" => "http://example.com/a",
"http://example.com/b" => [%{"@list" => [%{"@id" => "http://example.com/c"}]}]
}
],
"http://example.com/d" => [%{"@list" => [%{"@id" => "http://example.com/e"}]}]
}
]
},
"Two Graphs with same subject and lists" => %{
input: [
{EX.a(), EX.b(), RDF.bnode(:a), EX.U},
{RDF.bnode(:a), NS.RDF.first(), EX.c(), EX.U},
{RDF.bnode(:a), NS.RDF.rest(), NS.RDF.nil(), EX.U},
{EX.a(), EX.b(), RDF.bnode(:b), EX.V},
{RDF.bnode(:b), NS.RDF.first(), EX.e(), EX.V},
{RDF.bnode(:b), NS.RDF.rest(), NS.RDF.nil(), EX.V}
],
output: [
%{
"@id" => "http://example.com/U",
"@graph" => [
%{
"@id" => "http://example.com/a",
"http://example.com/b" => [
%{
"@list" => [%{"@id" => "http://example.com/c"}]
}]
}
]
}
]
},
@ -419,16 +542,18 @@ defmodule JSON.LD.EncoderTest do
"@graph" => [
%{
"@id" => "http://example.com/a",
"http://example.com/b" => [%{
"http://example.com/b" => [
%{
"@list" => [%{"@id" => "http://example.com/e"}]
}]
}
]
}
]
},
}
|> Enum.each(fn ({title, data}) ->
]
}
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: %{input: input, output: output}} do
input |> gets_serialized_to(output, data_structs: [Dataset])
@ -439,21 +564,22 @@ defmodule JSON.LD.EncoderTest do
describe "problems" do
%{
"xsd:boolean as value" => {
{~I<http://data.wikia.com/terms#playable>, NS.RDFS.range, NS.XSD.boolean},
[%{
{~I<http://data.wikia.com/terms#playable>, NS.RDFS.range(), NS.XSD.boolean()},
[
%{
"@id" => "http://data.wikia.com/terms#playable",
"http://www.w3.org/2000/01/rdf-schema#range" => [
%{ "@id" => "http://www.w3.org/2001/XMLSchema#boolean" }
%{"@id" => "http://www.w3.org/2001/XMLSchema#boolean"}
]
}]
},
}
|> Enum.each(fn ({title, data}) ->
]
}
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: {input, output}} do
input |> gets_serialized_to(output)
end
end)
end
end

View File

@ -6,7 +6,8 @@ defmodule JSON.LD.ExpansionTest do
alias RDF.NS.{RDFS, XSD}
test "Expanded form of a JSON-LD document (EXAMPLE 55 and 56 of https://www.w3.org/TR/json-ld/#expanded-document-form)" do
input = Jason.decode! """
input =
Jason.decode!("""
{
"@context":
{
@ -19,8 +20,10 @@ defmodule JSON.LD.ExpansionTest do
"name": "Manu Sporny",
"homepage": "http://manu.sporny.org/"
}
"""
assert JSON.LD.expand(input) == Jason.decode! """
""")
assert JSON.LD.expand(input) ==
Jason.decode!("""
[
{
"http://xmlns.com/foaf/0.1/name": [
@ -31,7 +34,7 @@ defmodule JSON.LD.ExpansionTest do
]
}
]
"""
""")
end
%{
@ -44,19 +47,23 @@ defmodule JSON.LD.ExpansionTest do
"@context" => %{
"foo" => %{"@id" => "http://example.com/foo", "@container" => "@list"}
},
"foo" => [%{"@value" => "bar"}]\
"foo" => [%{"@value" => "bar"}]
},
output: [%{
output: [
%{
"http://example.com/foo" => [%{"@list" => [%{"@value" => "bar"}]}]
}]
}
]
},
"native values in list" => %{
input: %{
"http://example.com/foo" => %{"@list" => [1, 2]}
},
output: [%{
output: [
%{
"http://example.com/foo" => [%{"@list" => [%{"@value" => 1}, %{"@value" => 2}]}]
}]
}
]
},
"@graph" => %{
input: %{
@ -94,7 +101,7 @@ defmodule JSON.LD.ExpansionTest do
output: [%{"http://example.com/ex" => [%{"@value" => false}]}]
}
}
|> Enum.each(fn ({title, data}) ->
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert JSON.LD.expand(data.input) == data.output
@ -106,43 +113,51 @@ defmodule JSON.LD.ExpansionTest do
"base" => %{
input: %{
"@id" => "",
"@type" => (RDFS.Resource |> RDF.uri |> to_string)
"@type" => RDFS.Resource |> RDF.uri() |> to_string
},
output: [%{
output: [
%{
"@id" => "http://example.org/",
"@type" => [RDFS.Resource |> RDF.uri |> to_string]
}]
"@type" => [RDFS.Resource |> RDF.uri() |> to_string]
}
]
},
"relative" => %{
input: %{
"@id" => "a/b",
"@type" => (RDFS.Resource |> RDF.uri |> to_string)
"@type" => RDFS.Resource |> RDF.uri() |> to_string
},
output: [%{
output: [
%{
"@id" => "http://example.org/a/b",
"@type" => [RDFS.Resource |> RDF.uri |> to_string]
}]
"@type" => [RDFS.Resource |> RDF.uri() |> to_string]
}
]
},
"hash" => %{
input: %{
"@id" => "#a",
"@type" => (RDFS.Resource |> RDF.uri |> to_string)
"@type" => RDFS.Resource |> RDF.uri() |> to_string
},
output: [%{
output: [
%{
"@id" => "http://example.org/#a",
"@type" => [RDFS.Resource |> RDF.uri |> to_string]
}]
"@type" => [RDFS.Resource |> RDF.uri() |> to_string]
}
]
},
"unmapped @id" => %{
input: %{
"http://example.com/foo" => %{"@id" => "bar"}
},
output: [%{
output: [
%{
"http://example.com/foo" => [%{"@id" => "http://example.org/bar"}]
}]
},
}
|> Enum.each(fn ({title, data}) ->
]
}
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert JSON.LD.expand(data.input, base: "http://example.org/") == data.output
@ -156,53 +171,65 @@ defmodule JSON.LD.ExpansionTest do
input: %{
"@context" => %{"id" => "@id"},
"id" => "",
"@type" => (RDFS.Resource |> RDF.uri |> to_string)
"@type" => RDFS.Resource |> RDF.uri() |> to_string
},
output: [%{
output: [
%{
"@id" => "",
"@type" =>[ (RDFS.Resource |> RDF.uri |> to_string)]
}]
"@type" => [RDFS.Resource |> RDF.uri() |> to_string]
}
]
},
"@type" => %{
input: %{
"@context" => %{"type" => "@type"},
"type" => (RDFS.Resource |> RDF.uri |> to_string),
"type" => RDFS.Resource |> RDF.uri() |> to_string,
"http://example.com/foo" => %{"@value" => "bar", "type" => "http://example.com/baz"}
},
output: [%{
"@type" => [RDFS.Resource |> RDF.uri |> to_string],
"http://example.com/foo" => [%{"@value" => "bar", "@type" => "http://example.com/baz"}]
}]
output: [
%{
"@type" => [RDFS.Resource |> RDF.uri() |> to_string],
"http://example.com/foo" => [
%{"@value" => "bar", "@type" => "http://example.com/baz"}
]
}
]
},
"@language" => %{
input: %{
"@context" => %{"language" => "@language"},
"http://example.com/foo" => %{"@value" => "bar", "language" => "baz"}
},
output: [%{
output: [
%{
"http://example.com/foo" => [%{"@value" => "bar", "@language" => "baz"}]
}]
}
]
},
"@value" => %{
input: %{
"@context" => %{"literal" => "@value"},
"http://example.com/foo" => %{"literal" => "bar"}
},
output: [%{
output: [
%{
"http://example.com/foo" => [%{"@value" => "bar"}]
}]
}
]
},
"@list" => %{
input: %{
"@context" => %{"list" => "@list"},
"http://example.com/foo" => %{"list" => ["bar"]}
},
output: [%{
output: [
%{
"http://example.com/foo" => [%{"@list" => [%{"@value" => "bar"}]}]
}]
},
}
|> Enum.each(fn ({title, data}) ->
]
}
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert JSON.LD.expand(data.input) == data.output
@ -217,48 +244,58 @@ defmodule JSON.LD.ExpansionTest do
"@context" => %{"e" => "http://example.org/vocab#"},
"e:bool" => true
},
output: [%{
output: [
%{
"http://example.org/vocab#bool" => [%{"@value" => true}]
}]
}
]
},
"false" => %{
input: %{
"@context" => %{"e" => "http://example.org/vocab#"},
"e:bool" => false
},
output: [%{
output: [
%{
"http://example.org/vocab#bool" => [%{"@value" => false}]
}]
}
]
},
"double" => %{
input: %{
"@context" => %{"e" => "http://example.org/vocab#"},
"e:double" => 1.23
},
output: [%{
output: [
%{
"http://example.org/vocab#double" => [%{"@value" => 1.23}]
}]
}
]
},
"double-zero" => %{
input: %{
"@context" => %{"e" => "http://example.org/vocab#"},
"e:double-zero" => 0.0e0
},
output: [%{
output: [
%{
"http://example.org/vocab#double-zero" => [%{"@value" => 0.0e0}]
}]
}
]
},
"integer" => %{
input: %{
"@context" => %{"e" => "http://example.org/vocab#"},
"e:integer" => 123
},
output: [%{
output: [
%{
"http://example.org/vocab#integer" => [%{"@value" => 123}]
}]
},
}
|> Enum.each(fn ({title, data}) ->
]
}
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert JSON.LD.expand(data.input) == data.output
@ -270,24 +307,36 @@ defmodule JSON.LD.ExpansionTest do
%{
"boolean" => %{
input: %{
"@context" => %{"foo" => %{"@id" => "http://example.org/foo", "@type" => to_string(XSD.boolean)}},
"@context" => %{
"foo" => %{"@id" => "http://example.org/foo", "@type" => to_string(XSD.boolean())}
},
"foo" => "true"
},
output: [%{
"http://example.org/foo" => [%{"@value" => "true", "@type" => to_string(XSD.boolean)}]
}]
output: [
%{
"http://example.org/foo" => [
%{"@value" => "true", "@type" => to_string(XSD.boolean())}
]
}
]
},
"date" => %{
input: %{
"@context" => %{"foo" => %{"@id" => "http://example.org/foo", "@type" => to_string(XSD.date)}},
"@context" => %{
"foo" => %{"@id" => "http://example.org/foo", "@type" => to_string(XSD.date())}
},
"foo" => "2011-03-26"
},
output: [%{
"http://example.org/foo" => [%{"@value" => "2011-03-26", "@type" => to_string(XSD.date)}]
}]
},
output: [
%{
"http://example.org/foo" => [
%{"@value" => "2011-03-26", "@type" => to_string(XSD.date())}
]
}
|> Enum.each(fn ({title, data}) ->
]
}
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert JSON.LD.expand(data.input) == data.output
@ -317,9 +366,11 @@ defmodule JSON.LD.ExpansionTest do
input: %{
"http://example.com/foo" => [nil]
},
output: [%{
output: [
%{
"http://example.com/foo" => []
}]
}
]
},
"@set with null @value" => %{
input: %{
@ -327,12 +378,14 @@ defmodule JSON.LD.ExpansionTest do
%{"@value" => nil, "@type" => "http://example.org/Type"}
]
},
output: [%{
output: [
%{
"http://example.com/foo" => []
}]
}
]
}
}
|> Enum.each(fn ({title, data}) ->
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert JSON.LD.expand(data.input) == data.output
@ -347,8 +400,8 @@ defmodule JSON.LD.ExpansionTest do
"@context" => %{
"@language" => "en",
"ex" => "http://example.org/vocab#",
"ex:german" => %{ "@language" => "de" },
"ex:nolang" => %{ "@language" => nil }
"ex:german" => %{"@language" => "de"},
"ex:nolang" => %{"@language" => nil}
},
"ex:german" => "german",
"ex:nolang" => "no language"
@ -359,9 +412,9 @@ defmodule JSON.LD.ExpansionTest do
"http://example.org/vocab#nolang" => [%{"@value" => "no language"}]
}
]
},
}
|> Enum.each(fn ({title, data}) ->
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert JSON.LD.expand(data.input) == data.output
@ -376,31 +429,37 @@ defmodule JSON.LD.ExpansionTest do
"@context" => %{"@vocab" => "http://example.com/"},
"verb" => %{"@value" => "foo"}
},
output: [%{
output: [
%{
"http://example.com/verb" => [%{"@value" => "foo"}]
}]
}
]
},
"datatype" => %{
input: %{
"@context" => %{"@vocab" => "http://example.com/"},
"http://example.org/verb" => %{"@value" => "foo", "@type" => "string"}
},
output: [%{
"http://example.org/verb" => [%{"@value" => "foo", "@type" => "http://example.com/string"}]
}]
output: [
%{
"http://example.org/verb" => [
%{"@value" => "foo", "@type" => "http://example.com/string"}
]
}
]
},
"expand-0028" => %{
input: %{
"@context" => %{
"@vocab" => "http://example.org/vocab#",
"date" => %{ "@type" => "dateTime" }
"date" => %{"@type" => "dateTime"}
},
"@id" => "example1",
"@type" => "test",
"date" => "2011-01-25T00:00:00Z",
"embed" => %{
"@id" => "example2",
"expandedDate" => %{ "@value" => "2012-08-01T00:00:00Z", "@type" => "dateTime" }
"expandedDate" => %{"@value" => "2012-08-01T00:00:00Z", "@type" => "dateTime"}
}
},
output: [
@ -428,7 +487,7 @@ defmodule JSON.LD.ExpansionTest do
]
}
}
|> Enum.each(fn ({title, data}) ->
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert JSON.LD.expand(data.input, base: "http://foo/bar/") == data.output
@ -448,9 +507,11 @@ defmodule JSON.LD.ExpansionTest do
input: %{
"http://example.com/foo" => %{"@value" => "bar", "@type" => "baz"}
},
output: [%{
output: [
%{
"http://example.com/foo" => [%{"@value" => "bar", "@type" => "http://example/baz"}]
}]
}
]
},
"unknown keyword" => %{
input: %{
@ -477,14 +538,16 @@ defmodule JSON.LD.ExpansionTest do
"prop" => "prop"
}
},
output: [%{
output: [
%{
"@id" => "http://example.org/id1",
"http://example.org/prop" => [%{"@value" => "prop"}],
"http://example.org/chain" => [%{"@id" => "http://example.org/id2"}]
}
]}
]
}
|> Enum.each(fn ({title, data}) ->
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert JSON.LD.expand(data.input, base: "http://example/") == data.output
@ -508,36 +571,48 @@ defmodule JSON.LD.ExpansionTest do
"coerced single element" => %{
input: %{
"@context" => %{"http://example.com/foo" => %{"@container" => "@list"}},
"http://example.com/foo" => [ "foo" ]
"http://example.com/foo" => ["foo"]
},
output: [%{"http://example.com/foo" => [%{"@list" => [%{"@value" => "foo"}]}]}]
},
"coerced multiple elements" => %{
input: %{
"@context" => %{"http://example.com/foo" => %{"@container" => "@list"}},
"http://example.com/foo" => [ "foo", "bar" ]
"http://example.com/foo" => ["foo", "bar"]
},
output: [%{
"http://example.com/foo" => [%{"@list" => [ %{"@value" => "foo"}, %{"@value" => "bar"} ]}]
}]
output: [
%{
"http://example.com/foo" => [
%{"@list" => [%{"@value" => "foo"}, %{"@value" => "bar"}]}
]
}
]
},
"explicit list with coerced @id values" => %{
input: %{
"@context" => %{"http://example.com/foo" => %{"@type" => "@id"}},
"http://example.com/foo" => %{"@list" => ["http://foo", "http://bar"]}
},
output: [%{
"http://example.com/foo" => [%{"@list" => [%{"@id" => "http://foo"}, %{"@id" => "http://bar"}]}]
}]
output: [
%{
"http://example.com/foo" => [
%{"@list" => [%{"@id" => "http://foo"}, %{"@id" => "http://bar"}]}
]
}
]
},
"explicit list with coerced datatype values" => %{
input: %{
"@context" => %{"http://example.com/foo" => %{"@type" => to_string(XSD.date)}},
"@context" => %{"http://example.com/foo" => %{"@type" => to_string(XSD.date())}},
"http://example.com/foo" => %{"@list" => ["2012-04-12"]}
},
output: [%{
"http://example.com/foo" => [%{"@list" => [%{"@value" => "2012-04-12", "@type" => to_string(XSD.date)}]}]
}]
output: [
%{
"http://example.com/foo" => [
%{"@list" => [%{"@value" => "2012-04-12", "@type" => to_string(XSD.date())}]}
]
}
]
},
"expand-0004" => %{
input: Jason.decode!(~s({
@ -564,7 +639,7 @@ defmodule JSON.LD.ExpansionTest do
]))
}
}
|> Enum.each(fn ({title, data}) ->
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert JSON.LD.expand(data.input) == data.output
@ -578,47 +653,57 @@ defmodule JSON.LD.ExpansionTest do
input: %{
"http://example.com/foo" => %{"@set" => []}
},
output: [%{
output: [
%{
"http://example.com/foo" => []
}]
}
]
},
"coerced empty" => %{
input: %{
"@context" => %{"http://example.com/foo" => %{"@container" => "@set"}},
"http://example.com/foo" => []
},
output: [%{
output: [
%{
"http://example.com/foo" => []
}]
}
]
},
"coerced single element" => %{
input: %{
"@context" => %{"http://example.com/foo" => %{"@container" => "@set"}},
"http://example.com/foo" => [ "foo" ]
"http://example.com/foo" => ["foo"]
},
output: [%{
"http://example.com/foo" => [ %{"@value" => "foo"} ]
}]
output: [
%{
"http://example.com/foo" => [%{"@value" => "foo"}]
}
]
},
"coerced multiple elements" => %{
input: %{
"@context" => %{"http://example.com/foo" => %{"@container" => "@set"}},
"http://example.com/foo" => [ "foo", "bar" ]
"http://example.com/foo" => ["foo", "bar"]
},
output: [%{
"http://example.com/foo" => [ %{"@value" => "foo"}, %{"@value" => "bar"} ]
}]
output: [
%{
"http://example.com/foo" => [%{"@value" => "foo"}, %{"@value" => "bar"}]
}
]
},
"array containing set" => %{
input: %{
"http://example.com/foo" => [%{"@set" => []}]
},
output: [%{
output: [
%{
"http://example.com/foo" => []
}]
},
}
|> Enum.each(fn ({title, data}) ->
]
}
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert JSON.LD.expand(data.input) == data.output
@ -640,7 +725,7 @@ defmodule JSON.LD.ExpansionTest do
"@id" => "http://example.com/queen",
"label" => %{
"en" => "The Queen",
"de" => [ "Die Königin", "Ihre Majestät" ]
"de" => ["Die Königin", "Ihre Majestät"]
}
},
output: [
@ -653,43 +738,43 @@ defmodule JSON.LD.ExpansionTest do
]
}
]
},
# TODO: Only the order of result is not correct, the content seems ok (although
# it's not clear why, since the "http://example.com/vocab/label" object is not handled in 7.5 code (at least debug statement are not printed
# "expand-0035" => %{
# input: %{
# "@context" => %{
# "@vocab" => "http://example.com/vocab/",
# "@language" => "it",
# "label" => %{
# "@container" => "@language"
# }
# },
# "@id" => "http://example.com/queen",
# "label" => %{
# "en" => "The Queen",
# "de" => [ "Die Königin", "Ihre Majestät" ]
# },
# "http://example.com/vocab/label" => [
# "Il re",
# %{ "@value" => "The king", "@language" => "en" }
# ]
# },
# output: [
# %{
# "@id" => "http://example.com/queen",
# "http://example.com/vocab/label" => [
# %{"@value" => "Il re", "@language" => "it"},
# %{"@value" => "The king", "@language" => "en"},
# %{"@value" => "Die Königin", "@language" => "de"},
# %{"@value" => "Ihre Majestät", "@language" => "de"},
# %{"@value" => "The Queen", "@language" => "en"},
# ]
# }
# ]
# }
}
|> Enum.each(fn ({title, data}) ->
# TODO: Only the order of result is not correct, the content seems ok (although
# it's not clear why, since the "http://example.com/vocab/label" object is not handled in 7.5 code (at least debug statement are not printed
# "expand-0035" => %{
# input: %{
# "@context" => %{
# "@vocab" => "http://example.com/vocab/",
# "@language" => "it",
# "label" => %{
# "@container" => "@language"
# }
# },
# "@id" => "http://example.com/queen",
# "label" => %{
# "en" => "The Queen",
# "de" => [ "Die Königin", "Ihre Majestät" ]
# },
# "http://example.com/vocab/label" => [
# "Il re",
# %{ "@value" => "The king", "@language" => "en" }
# ]
# },
# output: [
# %{
# "@id" => "http://example.com/queen",
# "http://example.com/vocab/label" => [
# %{"@value" => "Il re", "@language" => "it"},
# %{"@value" => "The king", "@language" => "en"},
# %{"@value" => "Die Königin", "@language" => "de"},
# %{"@value" => "Ihre Majestät", "@language" => "de"},
# %{"@value" => "The Queen", "@language" => "en"},
# ]
# }
# ]
# }
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert JSON.LD.expand(data.input) == data.output
@ -785,9 +870,9 @@ defmodule JSON.LD.ExpansionTest do
]
}
]))
},
}
|> Enum.each(fn ({title, data}) ->
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert JSON.LD.expand(data.input) == data.output
@ -808,7 +893,7 @@ defmodule JSON.LD.ExpansionTest do
"@id" => "http://example.com/annotationsTest",
"container" => %{
"en" => "The Queen",
"de" => [ "Die Königin", "Ihre Majestät" ]
"de" => ["Die Königin", "Ihre Majestät"]
}
},
output: [
@ -821,9 +906,9 @@ defmodule JSON.LD.ExpansionTest do
]
}
]
},
}
|> Enum.each(fn ({title, data}) ->
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert JSON.LD.expand(data.input) == data.output
@ -868,7 +953,6 @@ defmodule JSON.LD.ExpansionTest do
},
exception: JSON.LD.ListOfListsError
},
"@reverse object with an @id property" => %{
input: Jason.decode!(~s({
"@id": "http://example/foo",
@ -876,7 +960,7 @@ defmodule JSON.LD.ExpansionTest do
"@id": "http://example/bar"
}
})),
exception: JSON.LD.InvalidReversePropertyMapError,
exception: JSON.LD.InvalidReversePropertyMapError
},
"colliding keywords" => %{
input: Jason.decode!(~s({
@ -887,10 +971,10 @@ defmodule JSON.LD.ExpansionTest do
"id": "http://example/foo",
"ID": "http://example/bar"
})),
exception: JSON.LD.CollidingKeywordsError,
exception: JSON.LD.CollidingKeywordsError
}
}
|> Enum.each(fn ({title, data}) ->
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert_raise data.exception, fn -> JSON.LD.expand(data.input) end
@ -900,24 +984,29 @@ defmodule JSON.LD.ExpansionTest do
describe "expand_value" do
setup do
context = JSON.LD.context(%{
"dc" => "http://purl.org/dc/terms/", # TODO: RDF::Vocab::DC.to_uri.to_s,
context =
JSON.LD.context(%{
# TODO: RDF::Vocab::DC.to_uri.to_s,
"dc" => "http://purl.org/dc/terms/",
"ex" => "http://example.org/",
"foaf" => "http://xmlns.com/foaf/0.1/", # TODO: RDF::Vocab::FOAF.to_uri.to_s,
# TODO: RDF::Vocab::FOAF.to_uri.to_s,
"foaf" => "http://xmlns.com/foaf/0.1/",
"xsd" => "http://www.w3.org/2001/XMLSchema#",
"foaf:age" => %{"@type" => "xsd:integer"},
"foaf:knows" => %{"@type" => "@id"},
"dc:created" => %{"@type" => "xsd:date"},
"ex:integer" => %{"@type" => "xsd:integer"},
"ex:double" => %{"@type" => "xsd:double"},
"ex:boolean" => %{"@type" => "xsd:boolean"},
"ex:boolean" => %{"@type" => "xsd:boolean"}
})
%{example_context: context}
end
~w(boolean integer string dateTime date time)
|> Enum.each(fn dt ->
@tag skip: "This seems to be RDF.rb specific. The @id keys are produced when value is an RDF::URI or RDF::Node. Do we need/want something similar?"
@tag skip:
"This seems to be RDF.rb specific. The @id keys are produced when value is an RDF::URI or RDF::Node. Do we need/want something similar?"
@tag dt: dt
test "expands datatype xsd:#{dt}", %{dt: dt, example_context: context} do
assert expand_value(context, "foo", apply(XSD, String.to_atom(dt), []) |> to_string) ==
@ -932,46 +1021,55 @@ defmodule JSON.LD.ExpansionTest do
"no IRI" => ["foo", "http://example.com/", %{"@value" => "http://example.com/"}],
"no term" => ["foo", "ex", %{"@value" => "ex"}],
"no prefix" => ["foo", "ex:suffix", %{"@value" => "ex:suffix"}],
"integer" => ["foaf:age", "54", %{"@value" => "54", "@type" => XSD.integer |> to_string}],
"date " => ["dc:created", "2011-12-27Z", %{"@value" => "2011-12-27Z", "@type" => XSD.date |> to_string}],
"integer" => ["foaf:age", "54", %{"@value" => "54", "@type" => XSD.integer() |> to_string}],
"date " => [
"dc:created",
"2011-12-27Z",
%{"@value" => "2011-12-27Z", "@type" => XSD.date() |> to_string}
],
"native boolean" => ["foo", true, %{"@value" => true}],
"native integer" => ["foo", 1, %{"@value" => 1}],
"native double" => ["foo", 1.1e1, %{"@value" => 1.1E1}],
# TODO: Do we really want to support the following? RDF.rb has another implementation and uses this function
# for its implementation of fromRdf, instead of the RDF to Object Conversion algorithm in the spec ...
# "native date" => ["foo", ~D[2011-12-27], %{"@value" => "2011-12-27", "@type" => XSD.date |> to_string}],
# "native time" => ["foo", ~T[10:11:12Z], %{"@value" => "10:11:12Z", "@type" => XSD.time |> to_string}],
# "native dateTime" =>["foo", DateTime.from_iso8601("2011-12-27T10:11:12Z") |> elem(1), %{"@value" => "2011-12-27T10:11:12Z", "@type" => XSD.dateTime |> to_string}],
# "rdf boolean" => ["foo", RDF::Literal(true), %{"@value" => "true", "@type" => RDF::XSD.boolean.to_s}],
# "rdf integer" => ["foo", RDF::Literal(1), %{"@value" => "1", "@type" => XSD.integer |> to_string],
# "rdf decimal" => ["foo", RDF::Literal::Decimal.new(1.1), %{"@value" => "1.1", "@type" => XSD.decimal |> to_string}],
# "rdf double" => ["foo", RDF::Literal::Double.new(1.1), %{"@value" => "1.1E0", "@type" => XSD.double |> to_string}],
# "rdf URI" => ["foo", RDF::URI("foo"), %{"@id" => "foo"}],
# "rdf date " => ["foo", RDF::Literal(Date.parse("2011-12-27")), %{"@value" => "2011-12-27", "@type" => XSD.date |> to_string}],
# "rdf nonNeg" => ["foo", RDF::Literal::NonNegativeInteger.new(1), %{"@value" => "1", "@type" => XSD.nonNegativeInteger |> to_string}],
# "rdf float" => ["foo", RDF::Literal::Float.new(1.0), %{"@value" => "1.0", "@type" => XSD.float |> to_string}],
"native double" => ["foo", 1.1e1, %{"@value" => 1.1e1}]
# TODO: Do we really want to support the following? RDF.rb has another implementation and uses this function
# for its implementation of fromRdf, instead of the RDF to Object Conversion algorithm in the spec ...
# "native date" => ["foo", ~D[2011-12-27], %{"@value" => "2011-12-27", "@type" => XSD.date |> to_string}],
# "native time" => ["foo", ~T[10:11:12Z], %{"@value" => "10:11:12Z", "@type" => XSD.time |> to_string}],
# "native dateTime" =>["foo", DateTime.from_iso8601("2011-12-27T10:11:12Z") |> elem(1), %{"@value" => "2011-12-27T10:11:12Z", "@type" => XSD.dateTime |> to_string}],
# "rdf boolean" => ["foo", RDF::Literal(true), %{"@value" => "true", "@type" => RDF::XSD.boolean.to_s}],
# "rdf integer" => ["foo", RDF::Literal(1), %{"@value" => "1", "@type" => XSD.integer |> to_string],
# "rdf decimal" => ["foo", RDF::Literal::Decimal.new(1.1), %{"@value" => "1.1", "@type" => XSD.decimal |> to_string}],
# "rdf double" => ["foo", RDF::Literal::Double.new(1.1), %{"@value" => "1.1E0", "@type" => XSD.double |> to_string}],
# "rdf URI" => ["foo", RDF::URI("foo"), %{"@id" => "foo"}],
# "rdf date " => ["foo", RDF::Literal(Date.parse("2011-12-27")), %{"@value" => "2011-12-27", "@type" => XSD.date |> to_string}],
# "rdf nonNeg" => ["foo", RDF::Literal::NonNegativeInteger.new(1), %{"@value" => "1", "@type" => XSD.nonNegativeInteger |> to_string}],
# "rdf float" => ["foo", RDF::Literal::Float.new(1.0), %{"@value" => "1.0", "@type" => XSD.float |> to_string}],
}
|> Enum.each(fn ({title, data}) ->
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: [key, compacted, expanded], example_context: context} do
assert expand_value(context, key, compacted) == expanded
end
end)
# context "@language" do
# before(:each) {subject.default_language = "en"}
# context "@language" do
# before(:each) {subject.default_language = "en"}
%{
"no IRI" => ["foo", "http://example.com/", %{"@value" => "http://example.com/", "@language" => "en"}],
"no IRI" => [
"foo",
"http://example.com/",
%{"@value" => "http://example.com/", "@language" => "en"}
],
"no term" => ["foo", "ex", %{"@value" => "ex", "@language" => "en"}],
"no prefix" => ["foo", "ex:suffix", %{"@value" => "ex:suffix", "@language" => "en"}],
"native boolean" => ["foo", true, %{"@value" => true}],
"native integer" => ["foo", 1, %{"@value" => 1}],
"native double" => ["foo", 1.1, %{"@value" => 1.1}],
"native double" => ["foo", 1.1, %{"@value" => 1.1}]
}
|> Enum.each(fn ({title, data}) ->
|> Enum.each(fn {title, data} ->
# TODO
# @tag skip: "Do these errors originate from the differing context setup?"
@tag skip: "Why does this produce @language tags in RDF.rb, although no term definition of foo exists? Is this also RDF.rb specific?"
# @tag skip: "Do these errors originate from the differing context setup?"
@tag skip:
"Why does this produce @language tags in RDF.rb, although no term definition of foo exists? Is this also RDF.rb specific?"
@tag data: data
test "@language #{title}", %{data: [key, compacted, expanded], example_context: context} do
assert expand_value(context, key, compacted) == expanded
@ -979,25 +1077,68 @@ defmodule JSON.LD.ExpansionTest do
end)
%{
"boolean-boolean" => ["ex:boolean", true, %{"@value" => true, "@type" => XSD.boolean |> to_string}],
"boolean-integer" => ["ex:integer", true, %{"@value" => true, "@type" => XSD.integer |> to_string}],
"boolean-double" => ["ex:double", true, %{"@value" => true, "@type" => XSD.double |> to_string}],
"double-boolean" => ["ex:boolean", 1.1, %{"@value" => 1.1, "@type" => XSD.boolean |> to_string}],
"double-double" => ["ex:double", 1.1, %{"@value" => 1.1, "@type" => XSD.double |> to_string}],
"double-integer" => ["foaf:age", 1.1, %{"@value" => 1.1, "@type" => XSD.integer |> to_string}],
"integer-boolean" => ["ex:boolean", 1, %{"@value" => 1, "@type" => XSD.boolean |> to_string}],
"integer-double" => ["ex:double", 1, %{"@value" => 1, "@type" => XSD.double |> to_string}],
"integer-integer" => ["foaf:age", 1, %{"@value" => 1, "@type" => XSD.integer |> to_string}],
"string-boolean" => ["ex:boolean", "foo", %{"@value" => "foo", "@type" => XSD.boolean |> to_string}],
"string-double" => ["ex:double", "foo", %{"@value" => "foo", "@type" => XSD.double |> to_string}],
"string-integer" => ["foaf:age", "foo", %{"@value" => "foo", "@type" => XSD.integer |> to_string}],
"boolean-boolean" => [
"ex:boolean",
true,
%{"@value" => true, "@type" => XSD.boolean() |> to_string}
],
"boolean-integer" => [
"ex:integer",
true,
%{"@value" => true, "@type" => XSD.integer() |> to_string}
],
"boolean-double" => [
"ex:double",
true,
%{"@value" => true, "@type" => XSD.double() |> to_string}
],
"double-boolean" => [
"ex:boolean",
1.1,
%{"@value" => 1.1, "@type" => XSD.boolean() |> to_string}
],
"double-double" => [
"ex:double",
1.1,
%{"@value" => 1.1, "@type" => XSD.double() |> to_string}
],
"double-integer" => [
"foaf:age",
1.1,
%{"@value" => 1.1, "@type" => XSD.integer() |> to_string}
],
"integer-boolean" => [
"ex:boolean",
1,
%{"@value" => 1, "@type" => XSD.boolean() |> to_string}
],
"integer-double" => ["ex:double", 1, %{"@value" => 1, "@type" => XSD.double() |> to_string}],
"integer-integer" => [
"foaf:age",
1,
%{"@value" => 1, "@type" => XSD.integer() |> to_string}
],
"string-boolean" => [
"ex:boolean",
"foo",
%{"@value" => "foo", "@type" => XSD.boolean() |> to_string}
],
"string-double" => [
"ex:double",
"foo",
%{"@value" => "foo", "@type" => XSD.double() |> to_string}
],
"string-integer" => [
"foaf:age",
"foo",
%{"@value" => "foo", "@type" => XSD.integer() |> to_string}
]
}
|> Enum.each(fn ({title, data}) ->
|> Enum.each(fn {title, data} ->
@tag data: data
test "coercion #{title}", %{data: [key, compacted, expanded], example_context: context} do
assert expand_value(context, key, compacted) == expanded
end
end)
end
end

View File

@ -4,7 +4,8 @@ defmodule JSON.LD.FlatteningTest do
alias RDF.NS.RDFS
test "Flattened form of a JSON-LD document (EXAMPLE 60 and 61 of https://www.w3.org/TR/json-ld/#flattened-document-form)" do
input = Jason.decode! """
input =
Jason.decode!("""
{
"@context": {
"name": "http://xmlns.com/foaf/0.1/name",
@ -22,8 +23,10 @@ defmodule JSON.LD.FlatteningTest do
}
]
}
"""
assert JSON.LD.flatten(input, input) == Jason.decode! """
""")
assert JSON.LD.flatten(input, input) ==
Jason.decode!("""
{
"@context": {
"name": "http://xmlns.com/foaf/0.1/name",
@ -48,10 +51,9 @@ defmodule JSON.LD.FlatteningTest do
}
]
}
"""
""")
end
%{
"single object" => %{
input: %{"@id" => "http://example.com", "@type" => to_string(RDF.uri(RDFS.Resource))},
@ -66,16 +68,20 @@ defmodule JSON.LD.FlatteningTest do
},
"@id" => "http://greggkellogg.net/foaf",
"@type" => ["foaf:PersonalProfileDocument"],
"foaf:primaryTopic" => [%{
"foaf:primaryTopic" => [
%{
"@id" => "http://greggkellogg.net/foaf#me",
"@type" => ["foaf:Person"]
}]
}
]
},
output: [
%{
"@id" => "http://greggkellogg.net/foaf",
"@type" => ["http://xmlns.com/foaf/0.1/PersonalProfileDocument"],
"http://xmlns.com/foaf/0.1/primaryTopic" => [%{"@id" => "http://greggkellogg.net/foaf#me"}]
"http://xmlns.com/foaf/0.1/primaryTopic" => [
%{"@id" => "http://greggkellogg.net/foaf#me"}
]
},
%{
"@id" => "http://greggkellogg.net/foaf#me",
@ -107,7 +113,8 @@ defmodule JSON.LD.FlatteningTest do
]
},
"reverse properties" => %{
input: Jason.decode!("""
input:
Jason.decode!("""
[
{
"@id": "http://example.com/people/markus",
@ -125,7 +132,8 @@ defmodule JSON.LD.FlatteningTest do
}
]
"""),
output: Jason.decode!("""
output:
Jason.decode!("""
[
{
"@id": "http://example.com/people/dave",
@ -155,7 +163,8 @@ defmodule JSON.LD.FlatteningTest do
""")
},
"Simple named graph (Wikidata)" => %{
input: Jason.decode!("""
input:
Jason.decode!("""
{
"@context": {
"rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
@ -187,7 +196,8 @@ defmodule JSON.LD.FlatteningTest do
]
}
"""),
output: Jason.decode!("""
output:
Jason.decode!("""
[{
"@id": "http://example.org/ParisFact1",
"@type": ["http://www.w3.org/1999/02/22-rdf-syntax-ns#Graph"],
@ -212,7 +222,8 @@ defmodule JSON.LD.FlatteningTest do
""")
},
"Test Manifest (shortened)" => %{
input: Jason.decode!("""
input:
Jason.decode!("""
{
"@id": "",
"http://example/sequence": {"@list": [
@ -224,7 +235,8 @@ defmodule JSON.LD.FlatteningTest do
]}
}
"""),
output: Jason.decode!("""
output:
Jason.decode!("""
[{
"@id": "",
"http://example/sequence": [{"@list": [{"@id": "#t0001"}]}]
@ -237,7 +249,8 @@ defmodule JSON.LD.FlatteningTest do
options: %{}
},
"@reverse bnode issue (0045)" => %{
input: Jason.decode!("""
input:
Jason.decode!("""
{
"@context": {
"foo": "http://example.org/foo",
@ -247,7 +260,8 @@ defmodule JSON.LD.FlatteningTest do
"bar": [ "http://example.org/origin", "_:b0" ]
}
"""),
output: Jason.decode!("""
output:
Jason.decode!("""
[
{
"@id": "_:b0",
@ -266,11 +280,10 @@ defmodule JSON.LD.FlatteningTest do
options: %{}
}
}
|> Enum.each(fn ({title, data}) ->
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: data} do
assert JSON.LD.flatten(data.input) == data.output
end
end)
end

View File

@ -6,11 +6,13 @@ defmodule JSON.LD.IRICompactionTest do
alias RDF.NS.{XSD}
setup do
context = JSON.LD.context(%{
context =
JSON.LD.context(%{
"@base" => "http://base/",
"xsd" => "http://www.w3.org/2001/XMLSchema#",
"ex" => "http://example.org/",
"" => "http://empty/", # TODO: "Invalid JSON-LD syntax; a term cannot be an empty string."
# TODO: "Invalid JSON-LD syntax; a term cannot be an empty string."
"" => "http://empty/",
"_" => "http://underscore/",
"rex" => %{"@reverse" => "ex"},
"lex" => %{"@id" => "ex", "@language" => "en"},
@ -18,6 +20,7 @@ defmodule JSON.LD.IRICompactionTest do
"exp" => %{"@id" => "ex:pert"},
"experts" => %{"@id" => "ex:perts"}
})
%{example_context: context, inverse_context: JSON.LD.Context.inverse(context)}
end
@ -34,8 +37,11 @@ defmodule JSON.LD.IRICompactionTest do
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: [result, input], example_context: context,
inverse_context: inverse_context} do
test title, %{
data: [result, input],
example_context: context,
inverse_context: inverse_context
} do
assert compact_iri(input, context, inverse_context) == result
end
end)
@ -53,8 +59,11 @@ defmodule JSON.LD.IRICompactionTest do
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: [result, input], example_context: context,
inverse_context: inverse_context} do
test title, %{
data: [result, input],
example_context: context,
inverse_context: inverse_context
} do
assert compact_iri(input, context, inverse_context, nil, true) == result
end
end)
@ -78,25 +87,29 @@ defmodule JSON.LD.IRICompactionTest do
}
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: [result, input], example_context: context,
inverse_context: inverse_context} do
test title, %{
data: [result, input],
example_context: context,
inverse_context: inverse_context
} do
assert compact_iri(input, context, inverse_context, nil, true) == result
end
end)
# TODO: we don't support 'position: :predicate'"
# test "does not use @vocab if it would collide with a term" do
# subject.set_mapping("name", "http://xmlns.com/foaf/0.1/name")
# subject.set_mapping("ex", nil)
# expect(subject.compact_iri("http://example.org/name", position: :predicate)).
# to produce("lex:name", logger)
# end
# TODO: we don't support 'position: :predicate'"
# test "does not use @vocab if it would collide with a term" do
# subject.set_mapping("name", "http://xmlns.com/foaf/0.1/name")
# subject.set_mapping("ex", nil)
# expect(subject.compact_iri("http://example.org/name", position: :predicate)).
# to produce("lex:name", logger)
# end
end
describe "with value" do
setup do
context = JSON.LD.context(%{
"xsd" => XSD.__base_iri__,
context =
JSON.LD.context(%{
"xsd" => XSD.__base_iri__(),
"plain" => "http://example.com/plain",
"lang" => %{"@id" => "http://example.com/lang", "@language" => "en"},
"bool" => %{"@id" => "http://example.com/bool", "@type" => "xsd:boolean"},
@ -105,36 +118,85 @@ defmodule JSON.LD.IRICompactionTest do
"date" => %{"@id" => "http://example.com/date", "@type" => "xsd:date"},
"id" => %{"@id" => "http://example.com/id", "@type" => "@id"},
"listplain" => %{"@id" => "http://example.com/plain", "@container" => "@list"},
"listlang" => %{"@id" => "http://example.com/lang", "@language" => "en", "@container" => "@list"},
"listbool" => %{"@id" => "http://example.com/bool", "@type" => "xsd:boolean", "@container" => "@list"},
"listinteger" => %{"@id" => "http://example.com/integer", "@type" => "xsd:integer", "@container" => "@list"},
"listdouble" => %{"@id" => "http://example.com/double", "@type" => "xsd:double", "@container" => "@list"},
"listdate" => %{"@id" => "http://example.com/date", "@type" => "xsd:date", "@container" => "@list"},
"listid" => %{"@id" => "http://example.com/id", "@type" => "@id", "@container" => "@list"},
"listlang" => %{
"@id" => "http://example.com/lang",
"@language" => "en",
"@container" => "@list"
},
"listbool" => %{
"@id" => "http://example.com/bool",
"@type" => "xsd:boolean",
"@container" => "@list"
},
"listinteger" => %{
"@id" => "http://example.com/integer",
"@type" => "xsd:integer",
"@container" => "@list"
},
"listdouble" => %{
"@id" => "http://example.com/double",
"@type" => "xsd:double",
"@container" => "@list"
},
"listdate" => %{
"@id" => "http://example.com/date",
"@type" => "xsd:date",
"@container" => "@list"
},
"listid" => %{
"@id" => "http://example.com/id",
"@type" => "@id",
"@container" => "@list"
},
"setplain" => %{"@id" => "http://example.com/plain", "@container" => "@set"},
"setlang" => %{"@id" => "http://example.com/lang", "@language" => "en", "@container" => "@set"},
"setbool" => %{"@id" => "http://example.com/bool", "@type" => "xsd:boolean", "@container" => "@set"},
"setinteger" => %{"@id" => "http://example.com/integer", "@type" => "xsd:integer", "@container" => "@set"},
"setdouble" => %{"@id" => "http://example.com/double", "@type" => "xsd:double", "@container" => "@set"},
"setdate" => %{"@id" => "http://example.com/date", "@type" => "xsd:date", "@container" => "@set"},
"setlang" => %{
"@id" => "http://example.com/lang",
"@language" => "en",
"@container" => "@set"
},
"setbool" => %{
"@id" => "http://example.com/bool",
"@type" => "xsd:boolean",
"@container" => "@set"
},
"setinteger" => %{
"@id" => "http://example.com/integer",
"@type" => "xsd:integer",
"@container" => "@set"
},
"setdouble" => %{
"@id" => "http://example.com/double",
"@type" => "xsd:double",
"@container" => "@set"
},
"setdate" => %{
"@id" => "http://example.com/date",
"@type" => "xsd:date",
"@container" => "@set"
},
"setid" => %{"@id" => "http://example.com/id", "@type" => "@id", "@container" => "@set"},
"langmap" => %{"@id" => "http://example.com/langmap", "@container" => "@language"},
"langmap" => %{"@id" => "http://example.com/langmap", "@container" => "@language"}
})
%{example_context: context, inverse_context: JSON.LD.Context.inverse(context)}
end
%{
"langmap" => %{"@value" => "en", "@language" => "en"},
#"plain" => %{"@value" => "foo"},
# "plain" => %{"@value" => "foo"},
"setplain" => %{"@value" => "foo", "@language" => "pl"}
}
|> Enum.each(fn {prop, value} ->
@tag data: {prop, value}
test "uses #{prop} for #{inspect value}",
%{data: {prop, value}, example_context: context,
inverse_context: inverse_context} do
assert compact_iri("http://example.com/#{String.replace(prop, "set", "")}",
context, inverse_context, value, true) == prop
test "uses #{prop} for #{inspect(value)}",
%{data: {prop, value}, example_context: context, inverse_context: inverse_context} do
assert compact_iri(
"http://example.com/#{String.replace(prop, "set", "")}",
context,
inverse_context,
value,
true
) == prop
end
end)
@ -148,69 +210,76 @@ defmodule JSON.LD.IRICompactionTest do
[%{"@value" => "de", "@language" => "de"}, %{"@value" => "jp", "@language" => "jp"}],
[%{"@value" => true}],
[%{"@value" => false}],
[%{"@value" => 1}], [%{"@value" => 1.1}],
[%{"@value" => 1}],
[%{"@value" => 1.1}]
],
"listlang" => [[%{"@value" => "en", "@language" => "en"}]],
"listbool" => [[%{"@value" => "true", "@type" => to_string(XSD.boolean)}]],
"listinteger" => [[%{"@value" => "1", "@type" => to_string(XSD.integer)}]],
"listdouble" => [[%{"@value" => "1", "@type" => to_string(XSD.double)}]],
"listdate" => [[%{"@value" => "2012-04-17", "@type" => to_string(XSD.date)}]],
"listbool" => [[%{"@value" => "true", "@type" => to_string(XSD.boolean())}]],
"listinteger" => [[%{"@value" => "1", "@type" => to_string(XSD.integer())}]],
"listdouble" => [[%{"@value" => "1", "@type" => to_string(XSD.double())}]],
"listdate" => [[%{"@value" => "2012-04-17", "@type" => to_string(XSD.date())}]]
}
|> Enum.each(fn {prop, values} ->
Enum.each values, fn value ->
Enum.each(values, fn value ->
@tag data: {prop, value}
test "for @list uses #{prop} for #{inspect %{"@list" => value}}",
%{data: {prop, value}, example_context: context,
inverse_context: inverse_context} do
assert compact_iri("http://example.com/#{String.replace(prop, "list", "")}",
context, inverse_context, %{"@list" => value}, true) == prop
end
test "for @list uses #{prop} for #{inspect(%{"@list" => value})}",
%{data: {prop, value}, example_context: context, inverse_context: inverse_context} do
assert compact_iri(
"http://example.com/#{String.replace(prop, "list", "")}",
context,
inverse_context,
%{"@list" => value},
true
) == prop
end
end)
end)
end
# describe "with :simple_compact_iris" do
# before(:each) { subject.instance_variable_get(:@options)[:simple_compact_iris] = true}
#
# %{
# "nil" => [nil, nil],
# "absolute IRI" => ["http://example.com/", "http://example.com/"],
# "prefix:suffix" => ["ex:suffix", "http://example.org/suffix"],
# "keyword" => ["@type", "@type"],
# "empty" => [":suffix", "http://empty/suffix"],
# "unmapped" => ["foo", "foo"],
# "bnode" => ["_:a", RDF::Node("a")],
# "relative" => ["foo/bar", "http://base/foo/bar"],
# "odd CURIE" => ["ex:perts", "http://example.org/perts"]
# }.each do |title, (result, input)|
# test title do
# expect(subject.compact_iri(input)).to produce(result, logger)
# end
# end
#
# describe "and @vocab" do
# before(:each) { subject.vocab = "http://example.org/"}
#
# %{
# "absolute IRI" => ["http://example.com/", "http://example.com/"],
# "prefix:suffix" => ["suffix", "http://example.org/suffix"],
# "keyword" => ["@type", "@type"],
# "empty" => [":suffix", "http://empty/suffix"],
# "unmapped" => ["foo", "foo"],
# "bnode" => ["_:a", RDF::Node("a")],
# "relative" => ["http://base/foo/bar", "http://base/foo/bar"],
# "odd CURIE" => ["experts", "http://example.org/perts"]
# }.each do |title, (result, input)|
# test title do
# expect(subject.compact_iri(input, vocab: true)).to produce(result, logger)
# end
# end
# end
# end
# describe "with :simple_compact_iris" do
# before(:each) { subject.instance_variable_get(:@options)[:simple_compact_iris] = true}
#
# %{
# "nil" => [nil, nil],
# "absolute IRI" => ["http://example.com/", "http://example.com/"],
# "prefix:suffix" => ["ex:suffix", "http://example.org/suffix"],
# "keyword" => ["@type", "@type"],
# "empty" => [":suffix", "http://empty/suffix"],
# "unmapped" => ["foo", "foo"],
# "bnode" => ["_:a", RDF::Node("a")],
# "relative" => ["foo/bar", "http://base/foo/bar"],
# "odd CURIE" => ["ex:perts", "http://example.org/perts"]
# }.each do |title, (result, input)|
# test title do
# expect(subject.compact_iri(input)).to produce(result, logger)
# end
# end
#
# describe "and @vocab" do
# before(:each) { subject.vocab = "http://example.org/"}
#
# %{
# "absolute IRI" => ["http://example.com/", "http://example.com/"],
# "prefix:suffix" => ["suffix", "http://example.org/suffix"],
# "keyword" => ["@type", "@type"],
# "empty" => [":suffix", "http://empty/suffix"],
# "unmapped" => ["foo", "foo"],
# "bnode" => ["_:a", RDF::Node("a")],
# "relative" => ["http://base/foo/bar", "http://base/foo/bar"],
# "odd CURIE" => ["experts", "http://example.org/perts"]
# }.each do |title, (result, input)|
# test title do
# expect(subject.compact_iri(input, vocab: true)).to produce(result, logger)
# end
# end
# end
# end
describe "compact-0018" do
setup do
context = JSON.LD.context(Jason.decode! """
context =
JSON.LD.context(
Jason.decode!("""
{
"id1": "http://example.com/id1",
"type1": "http://example.com/t1",
@ -245,10 +314,11 @@ defmodule JSON.LD.IRICompactionTest do
}
}
""")
)
%{example_context: context, inverse_context: JSON.LD.Context.inverse(context)}
end
%{
"term" => [
'{ "@value": "v0.1", "@language": "de" }',
@ -317,20 +387,21 @@ defmodule JSON.LD.IRICompactionTest do
{ "@value": "v5.6", "@type": "http://example.com/t2" }
]
}
""",
"""
}
|> Enum.each(fn {term, values} ->
values = if is_binary(values),
values =
if is_binary(values),
do: [values],
else: values
Enum.each(values, fn value ->
value = Jason.decode!(value)
@tag data: {term, value}
test "uses #{term} for #{inspect value, limit: 3}",
%{data: {term, value}, example_context: context,
inverse_context: inverse_context} do
assert compact_iri("http://example.com/term", context, inverse_context,
value, true) == term
test "uses #{term} for #{inspect(value, limit: 3)}",
%{data: {term, value}, example_context: context, inverse_context: inverse_context} do
assert compact_iri("http://example.com/term", context, inverse_context, value, true) ==
term
end
end)
end)
@ -338,38 +409,46 @@ defmodule JSON.LD.IRICompactionTest do
describe "compact-0020" do
setup do
context = JSON.LD.context(%{
context =
JSON.LD.context(%{
"ex" => "http://example.org/ns#",
"ex:property" => %{"@container" => "@list"}
})
%{example_context: context, inverse_context: JSON.LD.Context.inverse(context)}
end
@tag skip: "TODO: we don't support 'position: :subject'"
test "Compact @id that is a property IRI when @container is @list", %{
example_context: context, inverse_context: inverse_context} do
assert compact_iri("http://example.org/ns#property", context, inverse_context) == "ex:property"
# expect(ctx.compact_iri("http://example.org/ns#property", position: :subject)).
# to produce("ex:property", logger)
example_context: context,
inverse_context: inverse_context
} do
assert compact_iri("http://example.org/ns#property", context, inverse_context) ==
"ex:property"
# expect(ctx.compact_iri("http://example.org/ns#property", position: :subject)).
# to produce("ex:property", logger)
end
end
describe "compact-0041" do
setup do
context = JSON.LD.context(%{
context =
JSON.LD.context(%{
"name" => %{"@id" => "http://example.com/property", "@container" => "@list"}
})
%{example_context: context, inverse_context: JSON.LD.Context.inverse(context)}
end
test "Does not use @list with @index", %{
example_context: context, inverse_context: inverse_context} do
assert compact_iri("http://example.com/property", context, inverse_context,
%{
example_context: context,
inverse_context: inverse_context
} do
assert compact_iri("http://example.com/property", context, inverse_context, %{
"@list" => ["one item"],
"@index" => "an annotation"
}) == "http://example.com/property"
end
end
end

View File

@ -4,13 +4,15 @@ defmodule JSON.LD.IRIExpansionTest do
import JSON.LD.IRIExpansion
setup do
context = JSON.LD.context(%{
context =
JSON.LD.context(%{
"@base" => "http://base/",
"@vocab" => "http://vocab/",
"ex" => "http://example.org/",
"" => "http://empty/",
"_" => "http://underscore/"
})
%{example_context: context}
end
@ -19,21 +21,21 @@ defmodule JSON.LD.IRIExpansionTest do
end
describe "relative IRI with no options" do
# TODO: Test this with RDF.URIs and RDF.BlankNodes
# %{
# "absolute IRI" => ["http://example.org/", ~I<http://example.org/>],
# "term" => ["ex", ~I<ex>],
# "prefix:suffix" => ["ex:suffix", ~I<http://example.org/suffix>],
# "keyword" => ["@type", "@type"],
# "empty" => [":suffix", ~I<http://empty/suffix>],
# "unmapped" => ["foo", ~I<foo>],
# "empty term" => ["", ~I<>],
# "another abs IRI"=>["ex://foo", ~I<ex://foo>],
# "absolute IRI looking like a curie" =>
# ["foo:bar", ~I<foo:bar>],
# "bnode" => ["_:t0", RDF.bnode("t0")],
# "_" => ["_", ~I<_>],
# }
# TODO: Test this with RDF.URIs and RDF.BlankNodes
# %{
# "absolute IRI" => ["http://example.org/", ~I<http://example.org/>],
# "term" => ["ex", ~I<ex>],
# "prefix:suffix" => ["ex:suffix", ~I<http://example.org/suffix>],
# "keyword" => ["@type", "@type"],
# "empty" => [":suffix", ~I<http://empty/suffix>],
# "unmapped" => ["foo", ~I<foo>],
# "empty term" => ["", ~I<>],
# "another abs IRI"=>["ex://foo", ~I<ex://foo>],
# "absolute IRI looking like a curie" =>
# ["foo:bar", ~I<foo:bar>],
# "bnode" => ["_:t0", RDF.bnode("t0")],
# "_" => ["_", ~I<_>],
# }
%{
"absolute IRI" => ["http://example.org/", "http://example.org/"],
"term" => ["ex", "ex"],
@ -42,13 +44,12 @@ defmodule JSON.LD.IRIExpansionTest do
"empty" => [":suffix", "http://empty/suffix"],
"unmapped" => ["foo", "foo"],
"empty term" => ["", ""],
"another abs IRI"=>["ex://foo", "ex://foo"],
"absolute IRI looking like a curie" =>
["foo:bar", "foo:bar"],
"another abs IRI" => ["ex://foo", "ex://foo"],
"absolute IRI looking like a curie" => ["foo:bar", "foo:bar"],
"bnode" => ["_:t0", "_:t0"],
"_" => ["_", "_"],
"_" => ["_", "_"]
}
|> Enum.each(fn ({title, data}) ->
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: [input, result], example_context: context} do
assert expand_iri(input, context) == result
@ -57,21 +58,21 @@ defmodule JSON.LD.IRIExpansionTest do
end
describe "relative IRI with base IRI" do
# TODO: Test this with RDF.URIs and RDF.BlankNodes
# %{
# "absolute IRI" => ["http://example.org/", ~I<http://example.org/>],
# "term" => ["ex", ~I<http://base/ex>],
# "prefix:suffix" => ["ex:suffix", ~I<http://example.org/suffix>],
# "keyword" => ["@type", "@type"],
# "empty" => [":suffix", ~I<http://empty/suffix>],
# "unmapped" => ["foo", ~I<http://base/foo>],
# "empty term" => ["", ~I<http://base/>],
# "another abs IRI"=>["ex://foo", ~I<ex://foo>],
# "absolute IRI looking like a curie" =>
# ["foo:bar", ~I<foo:bar>],
# "bnode" => ["_:t0", RDF.bnode("t0")],
# "_" => ["_", ~I<http://base/_>],
# }
# TODO: Test this with RDF.URIs and RDF.BlankNodes
# %{
# "absolute IRI" => ["http://example.org/", ~I<http://example.org/>],
# "term" => ["ex", ~I<http://base/ex>],
# "prefix:suffix" => ["ex:suffix", ~I<http://example.org/suffix>],
# "keyword" => ["@type", "@type"],
# "empty" => [":suffix", ~I<http://empty/suffix>],
# "unmapped" => ["foo", ~I<http://base/foo>],
# "empty term" => ["", ~I<http://base/>],
# "another abs IRI"=>["ex://foo", ~I<ex://foo>],
# "absolute IRI looking like a curie" =>
# ["foo:bar", ~I<foo:bar>],
# "bnode" => ["_:t0", RDF.bnode("t0")],
# "_" => ["_", ~I<http://base/_>],
# }
%{
"absolute IRI" => ["http://example.org/", "http://example.org/"],
"term" => ["ex", "http://base/ex"],
@ -80,13 +81,12 @@ defmodule JSON.LD.IRIExpansionTest do
"empty" => [":suffix", "http://empty/suffix"],
"unmapped" => ["foo", "http://base/foo"],
"empty term" => ["", "http://base/"],
"another abs IRI"=>["ex://foo", "ex://foo"],
"absolute IRI looking like a curie" =>
["foo:bar", "foo:bar"],
"another abs IRI" => ["ex://foo", "ex://foo"],
"absolute IRI looking like a curie" => ["foo:bar", "foo:bar"],
"bnode" => ["_:t0", "_:t0"],
"_" => ["_", "http://base/_"],
"_" => ["_", "http://base/_"]
}
|> Enum.each(fn ({title, data}) ->
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: [input, result], example_context: context} do
assert expand_iri(input, context, true) == result
@ -95,21 +95,21 @@ defmodule JSON.LD.IRIExpansionTest do
end
describe "relative IRI @vocab" do
# TODO: Test this with RDF.URIs and RDF.BlankNodes
# %{
# "absolute IRI" => ["http://example.org/", ~I<http://example.org/>],
# "term" => ["ex", ~I<http://example.org/>],
# "prefix:suffix" => ["ex:suffix", ~I<http://example.org/suffix>],
# "keyword" => ["@type", "@type"],
# "empty" => [":suffix", ~I<http://empty/suffix>],
# "unmapped" => ["foo", ~I<http://vocab/foo>],
# "empty term" => ["", ~I<http://empty/>],
# "another abs IRI"=>["ex://foo", ~I<ex://foo>],
# "absolute IRI looking like a curie" =>
# ["foo:bar", ~I<foo:bar>],
# "bnode" => ["_:t0", RDF.bode("t0")],
# "_" => ["_", ~I<http://underscore/>],
# }
# TODO: Test this with RDF.URIs and RDF.BlankNodes
# %{
# "absolute IRI" => ["http://example.org/", ~I<http://example.org/>],
# "term" => ["ex", ~I<http://example.org/>],
# "prefix:suffix" => ["ex:suffix", ~I<http://example.org/suffix>],
# "keyword" => ["@type", "@type"],
# "empty" => [":suffix", ~I<http://empty/suffix>],
# "unmapped" => ["foo", ~I<http://vocab/foo>],
# "empty term" => ["", ~I<http://empty/>],
# "another abs IRI"=>["ex://foo", ~I<ex://foo>],
# "absolute IRI looking like a curie" =>
# ["foo:bar", ~I<foo:bar>],
# "bnode" => ["_:t0", RDF.bode("t0")],
# "_" => ["_", ~I<http://underscore/>],
# }
%{
"absolute IRI" => ["http://example.org/", "http://example.org/"],
"term" => ["ex", "http://example.org/"],
@ -118,18 +118,16 @@ defmodule JSON.LD.IRIExpansionTest do
"empty" => [":suffix", "http://empty/suffix"],
"unmapped" => ["foo", "http://vocab/foo"],
"empty term" => ["", "http://empty/"],
"another abs IRI"=>["ex://foo", "ex://foo"],
"absolute IRI looking like a curie" =>
["foo:bar", "foo:bar"],
"another abs IRI" => ["ex://foo", "ex://foo"],
"absolute IRI looking like a curie" => ["foo:bar", "foo:bar"],
"bnode" => ["_:t0", "_:t0"],
"_" => ["_", "http://underscore/"],
"_" => ["_", "http://underscore/"]
}
|> Enum.each(fn ({title, data}) ->
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: [input, result], example_context: context} do
assert expand_iri(input, context, false, true) == result
end
end)
end
end

View File

@ -5,7 +5,7 @@ defmodule JSON.LD.RemoteContextTest do
setup_all do
local =
Jason.decode! """
Jason.decode!("""
{
"@context": {
"name": "http://xmlns.com/foaf/0.1/name",
@ -14,16 +14,16 @@ defmodule JSON.LD.RemoteContextTest do
"name": "Manu Sporny",
"homepage": "http://manu.sporny.org/"
}
"""
""")
remote =
Jason.decode! """
Jason.decode!("""
{
"@context": "http://example.com/test-context",
"name": "Manu Sporny",
"homepage": "http://manu.sporny.org/"
}
"""
""")
{:ok, local: local, remote: remote}
end
@ -42,13 +42,13 @@ defmodule JSON.LD.RemoteContextTest do
test "failed loading of remote context" do
remote =
Jason.decode! """
Jason.decode!("""
{
"@context": "http://fake.com/fake-context",
"name": "Manu Sporny",
"homepage": "http://manu.sporny.org/"
}
"""
""")
assert_raise LoadingRemoteContextFailedError, fn ->
JSON.LD.flatten(remote, nil, %Options{document_loader: DocumentLoader.Test})

View File

@ -17,5 +17,4 @@ defmodule JSON.LD.UtilsTest do
assert compact_iri_parts("_:bar") == nil
end
end
end

View File

@ -6,42 +6,69 @@ defmodule JSON.LD.ValueCompactionTest do
alias RDF.NS.{XSD}
setup do
context = JSON.LD.context(%{
"dc" => "http://purl.org/dc/terms/", # TODO: RDF::Vocab::DC.to_uri.to_s,
context =
JSON.LD.context(%{
# TODO: RDF::Vocab::DC.to_uri.to_s,
"dc" => "http://purl.org/dc/terms/",
"ex" => "http://example.org/",
"foaf" => "http://xmlns.com/foaf/0.1/", # TODO: RDF::Vocab::FOAF.to_uri.to_s,
"xsd" => to_string(XSD.__base_iri__),
# TODO: RDF::Vocab::FOAF.to_uri.to_s,
"foaf" => "http://xmlns.com/foaf/0.1/",
"xsd" => to_string(XSD.__base_iri__()),
"langmap" => %{"@id" => "http://example.com/langmap", "@container" => "@language"},
"list" => %{"@id" => "http://example.org/list", "@container" => "@list"},
"nolang" => %{"@id" => "http://example.org/nolang", "@language" => nil},
"dc:created" => %{"@type" => to_string(XSD.date)},
"foaf:age" => %{"@type" => to_string(XSD.integer)},
"foaf:knows" => %{"@type" => "@id"},
"dc:created" => %{"@type" => to_string(XSD.date())},
"foaf:age" => %{"@type" => to_string(XSD.integer())},
"foaf:knows" => %{"@type" => "@id"}
})
%{example_context: context, inverse_context: JSON.LD.Context.inverse(context)}
end
%{
"absolute IRI" => ["foaf:knows", "http://example.com/", %{"@id" => "http://example.com/"}],
"prefix:suffix" => ["foaf:knows", "ex:suffix", %{"@id" => "http://example.org/suffix"}],
"integer" => ["foaf:age", "54", %{"@value" => "54", "@type" => to_string(XSD.integer)}],
"date " => ["dc:created", "2011-12-27Z", %{"@value" => "2011-12-27Z", "@type" => to_string(XSD.date)}],
"integer" => ["foaf:age", "54", %{"@value" => "54", "@type" => to_string(XSD.integer())}],
"date " => [
"dc:created",
"2011-12-27Z",
%{"@value" => "2011-12-27Z", "@type" => to_string(XSD.date())}
],
"no IRI" => ["foo", %{"@id" => "http://example.com/"}, %{"@id" => "http://example.com/"}],
"no IRI (CURIE)" => ["foo", %{"@id" => "http://xmlns.com/foaf/0.1/Person"}, %{"@id" => "http://xmlns.com/foaf/0.1/Person"}],
"no boolean" => ["foo", %{"@value" => "true", "@type" => to_string(XSD.boolean)},%{"@value" => "true", "@type" => to_string(XSD.boolean)}],
"no integer" => ["foo", %{"@value" => "54", "@type" => to_string(XSD.integer)},%{"@value" => "54", "@type" => to_string(XSD.integer)}],
"no date " => ["foo", %{"@value" => "2011-12-27Z", "@type" => to_string(XSD.date)}, %{"@value" => "2011-12-27Z", "@type" => to_string(XSD.date)}],
"no IRI (CURIE)" => [
"foo",
%{"@id" => "http://xmlns.com/foaf/0.1/Person"},
%{"@id" => "http://xmlns.com/foaf/0.1/Person"}
],
"no boolean" => [
"foo",
%{"@value" => "true", "@type" => to_string(XSD.boolean())},
%{"@value" => "true", "@type" => to_string(XSD.boolean())}
],
"no integer" => [
"foo",
%{"@value" => "54", "@type" => to_string(XSD.integer())},
%{"@value" => "54", "@type" => to_string(XSD.integer())}
],
"no date " => [
"foo",
%{"@value" => "2011-12-27Z", "@type" => to_string(XSD.date())},
%{"@value" => "2011-12-27Z", "@type" => to_string(XSD.date())}
],
"no string " => ["foo", "string", %{"@value" => "string"}],
"no lang " => ["nolang", "string", %{"@value" => "string"}],
"native boolean" => ["foo", true, %{"@value" => true}],
"native integer" => ["foo", 1, %{"@value" => 1}],
"native integer(list)"=>["list", 1, %{"@value" => 1}],
"native double" => ["foo", 1.1e1, %{"@value" => 1.1E1}],
"native integer(list)" => ["list", 1, %{"@value" => 1}],
"native double" => ["foo", 1.1e1, %{"@value" => 1.1e1}]
}
|> Enum.each(fn ({title, data}) ->
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: [key, compacted, expanded], example_context: context,
inverse_context: inverse_context} do
test title, %{
data: [key, compacted, expanded],
example_context: context,
inverse_context: inverse_context
} do
assert compact_value(expanded, context, inverse_context, key) == compacted
end
end)
@ -54,54 +81,80 @@ defmodule JSON.LD.ValueCompactionTest do
%{
"@id" => ["foo", %{"@id" => "foo"}, %{"@id" => "foo"}],
"integer" => ["foo", %{"@value" => "54", "@type" => to_string(XSD.integer)}, %{"@value" => "54", "@type" => to_string(XSD.integer)}],
"date" => ["foo", %{"@value" => "2011-12-27Z","@type" => to_string(XSD.date)},%{"@value" => "2011-12-27Z", "@type" => to_string(XSD.date)}],
"no lang" => ["foo", %{"@value" => "foo" }, %{"@value" => "foo"}],
"integer" => [
"foo",
%{"@value" => "54", "@type" => to_string(XSD.integer())},
%{"@value" => "54", "@type" => to_string(XSD.integer())}
],
"date" => [
"foo",
%{"@value" => "2011-12-27Z", "@type" => to_string(XSD.date())},
%{"@value" => "2011-12-27Z", "@type" => to_string(XSD.date())}
],
"no lang" => ["foo", %{"@value" => "foo"}, %{"@value" => "foo"}],
"same lang" => ["foo", "foo", %{"@value" => "foo", "@language" => "en"}],
"other lang" => ["foo", %{"@value" => "foo", "@language" => "bar"}, %{"@value" => "foo", "@language" => "bar"}],
"other lang" => [
"foo",
%{"@value" => "foo", "@language" => "bar"},
%{"@value" => "foo", "@language" => "bar"}
],
"langmap" => ["langmap", "en", %{"@value" => "en", "@language" => "en"}],
"no lang with @type coercion" => ["dc:created", %{"@value" => "foo"}, %{"@value" => "foo"}],
"no lang with @id coercion" => ["foaf:knows", %{"@value" => "foo"}, %{"@value" => "foo"}],
"no lang with @language=null" => ["nolang", "string", %{"@value" => "string"}],
"same lang with @type coercion" => ["dc:created", %{"@value" => "foo"}, %{"@value" => "foo"}],
"same lang with @type coercion" => [
"dc:created",
%{"@value" => "foo"},
%{"@value" => "foo"}
],
"same lang with @id coercion" => ["foaf:knows", %{"@value" => "foo"}, %{"@value" => "foo"}],
"other lang with @type coercion" => ["dc:created", %{"@value" => "foo", "@language" => "bar"}, %{"@value" => "foo", "@language" => "bar"}],
"other lang with @id coercion" => ["foaf:knows", %{"@value" => "foo", "@language" => "bar"}, %{"@value" => "foo", "@language" => "bar"}],
"other lang with @type coercion" => [
"dc:created",
%{"@value" => "foo", "@language" => "bar"},
%{"@value" => "foo", "@language" => "bar"}
],
"other lang with @id coercion" => [
"foaf:knows",
%{"@value" => "foo", "@language" => "bar"},
%{"@value" => "foo", "@language" => "bar"}
],
"native boolean" => ["foo", true, %{"@value" => true}],
"native integer" => ["foo", 1, %{"@value" => 1}],
"native integer(list)" => ["list", 1, %{"@value" => 1}],
"native double" => ["foo", 1.1e1, %{"@value" => 1.1E1}],
"native double" => ["foo", 1.1e1, %{"@value" => 1.1e1}]
}
|> Enum.each(fn ({title, data}) ->
|> Enum.each(fn {title, data} ->
@tag data: data
test title, %{data: [key, compacted, expanded], example_context: context,
inverse_context: inverse_context} do
test title, %{
data: [key, compacted, expanded],
example_context: context,
inverse_context: inverse_context
} do
assert compact_value(expanded, context, inverse_context, key) == compacted
end
end)
end
# TODO
# describe "keywords" do
# before(:each) do
# subject.set_mapping("id", "@id")
# subject.set_mapping("type", "@type")
# subject.set_mapping("list", "@list")
# subject.set_mapping("set", "@set")
# subject.set_mapping("language", "@language")
# subject.set_mapping("literal", "@value")
# end
#
# %{
# "@id" => [%{"id" => "http://example.com/"}, %{"@id" => "http://example.com/"}],
# "@type" => [%{"literal" => "foo", "type" => "http://example.com/"},
# %{"@value" => "foo", "@type" => "http://example.com/"}],
# "@value" => [%{"literal" => "foo", "language" => "bar"}, %{"@value" => "foo", "@language" => "bar"}],
# }.each do |title, (compacted, expanded)|
# test title do
# expect(subject.compact_value("foo", expanded)).to produce(compacted, logger)
# end
# end
# end
# TODO
# describe "keywords" do
# before(:each) do
# subject.set_mapping("id", "@id")
# subject.set_mapping("type", "@type")
# subject.set_mapping("list", "@list")
# subject.set_mapping("set", "@set")
# subject.set_mapping("language", "@language")
# subject.set_mapping("literal", "@value")
# end
#
# %{
# "@id" => [%{"id" => "http://example.com/"}, %{"@id" => "http://example.com/"}],
# "@type" => [%{"literal" => "foo", "type" => "http://example.com/"},
# %{"@value" => "foo", "@type" => "http://example.com/"}],
# "@value" => [%{"literal" => "foo", "language" => "bar"}, %{"@value" => "foo", "@language" => "bar"}],
# }.each do |title, (compacted, expanded)|
# test title do
# expect(subject.compact_value("foo", expanded)).to produce(compacted, logger)
# end
# end
# end
end