Fix warnings (#3)

Fixes most of the warnings. Leaves some warnings on functions marked TODO.
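Most of the changes below apply the same two fixes. Elixir warns when a variable is rebound inside a conditional (if, unless, cond, case) and then read after it, with a message along the lines of "the variable is unsafe as it has been set inside a conditional; please explicitly return the variable value instead", and it warns about unused bindings and parameters. A few call sites also gain parentheses (new_list() instead of new_list) to avoid the variable/function-call ambiguity warning. The refactoring therefore binds the result of each conditional expression and prefixes unused names with an underscore. A minimal sketch of the pattern, using hypothetical names that do not appear in this commit:

defmodule WarningFixSketch do
  # Hypothetical module illustrating the warning fixes; not part of this commit.

  # Prefixing the unused parameter with "_" silences the "variable is unused" warning.
  def classify(value, _opts) do
    # Rebinding a variable inside the branches and reading it afterwards would
    # trigger the "unsafe as it has been set inside a conditional" warning;
    # binding the conditional's return value instead is warning-free.
    label =
      cond do
        is_integer(value) -> "integer"
        is_binary(value) -> "string"
        true -> "other"
      end

    label
  end
end

The same shape recurs throughout the diff: instead of rebinding a variable inside the branches, each branch returns a value and the conditional's result is bound once.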
This commit is contained in:
Andrew Shu 2017-09-13 16:41:12 -07:00 committed by Marcel Otto
parent 235b9bce38
commit 244a4b34b2
9 changed files with 848 additions and 713 deletions

@ -57,127 +57,144 @@ defmodule JSON.LD.Compaction do
defp do_compact(element, active_context, inverse_context, active_property, compact_arrays)
when is_map(element) do
# 4)
if (Map.has_key?(element, "@value") or Map.has_key?(element, "@id")) and
scalar?(result = compact_value(element, active_context, inverse_context, active_property)) do
result
if (Map.has_key?(element, "@value") or Map.has_key?(element, "@id")) do
result = compact_value(element, active_context, inverse_context, active_property)
if scalar?(result) do
result
else
do_compact_non_scalar(element, active_context, inverse_context, active_property, compact_arrays)
end
else
# 5)
inside_reverse = active_property == "@reverse"
# 6) + 7)
element
|> Enum.sort_by(fn {expanded_property , _} -> expanded_property end)
|> Enum.reduce(%{}, fn ({expanded_property, expanded_value}, result) ->
cond do
# 7.1)
expanded_property in ~w[@id @type] ->
# 7.1.1)
compacted_value =
if is_binary(expanded_value) do
compact_iri(expanded_value, active_context, inverse_context, nil,
expanded_property == "@type")
# 7.1.2)
else
# 7.1.2.1)
# TODO: RDF.rb calls also Array#compact
if(is_list(expanded_value),
do: expanded_value,
else: [expanded_value])
# 7.1.2.2)
|> Enum.reduce([], fn (expanded_type, compacted_value) ->
compacted_value ++
[compact_iri(expanded_type, active_context, inverse_context, nil, true)]
end)
# 7.1.2.3)
|> case(do: (
[compacted_value] -> compacted_value
compacted_value -> compacted_value))
end
# 7.1.3)
alias = compact_iri(expanded_property, active_context, inverse_context, nil, true)
# 7.1.4)
Map.put(result, alias, compacted_value)
do_compact_non_scalar(element, active_context, inverse_context, active_property, compact_arrays)
end
end
# 7.2)
expanded_property == "@reverse" ->
# 7.2.1)
compacted_value = do_compact(expanded_value, active_context, inverse_context, "@reverse")
# 7.2.2)
{compacted_value, result} =
Enum.reduce compacted_value, {%{}, result},
fn ({property, value}, {compacted_value, result}) ->
term_def = active_context.term_defs[property]
# 7.2.2.1)
if term_def && term_def.reverse_property do
# 7.2.2.1.1)
defp do_compact_non_scalar(element, active_context, inverse_context, active_property, compact_arrays) do
# 5)
inside_reverse = active_property == "@reverse"
# 6) + 7)
element
|> Enum.sort_by(fn {expanded_property , _} -> expanded_property end)
|> Enum.reduce(%{}, fn ({expanded_property, expanded_value}, result) ->
cond do
# 7.1)
expanded_property in ~w[@id @type] ->
# 7.1.1)
compacted_value =
if is_binary(expanded_value) do
compact_iri(expanded_value, active_context, inverse_context, nil,
expanded_property == "@type")
# 7.1.2)
else
# 7.1.2.1)
# TODO: RDF.rb calls also Array#compact
if(is_list(expanded_value),
do: expanded_value,
else: [expanded_value])
# 7.1.2.2)
|> Enum.reduce([], fn (expanded_type, compacted_value) ->
compacted_value ++
[compact_iri(expanded_type, active_context, inverse_context, nil, true)]
end)
# 7.1.2.3)
|> case(do: (
[compacted_value] -> compacted_value
compacted_value -> compacted_value))
end
# 7.1.3)
alias = compact_iri(expanded_property, active_context, inverse_context, nil, true)
# 7.1.4)
Map.put(result, alias, compacted_value)
# 7.2)
expanded_property == "@reverse" ->
# 7.2.1)
compacted_value = do_compact(expanded_value, active_context, inverse_context, "@reverse")
# 7.2.2)
{compacted_value, result} =
Enum.reduce compacted_value, {%{}, result},
fn ({property, value}, {compacted_value, result}) ->
term_def = active_context.term_defs[property]
# 7.2.2.1)
if term_def && term_def.reverse_property do
# 7.2.2.1.1)
value =
if (!compact_arrays or term_def.container_mapping == "@set") and
!is_list(value) do
value = [value]
[value]
else
value
end
# 7.2.2.1.2) + 7.2.2.1.3)
{compacted_value, merge_compacted_value(result, property, value)}
else
{Map.put(compacted_value, property, value), result}
end
# 7.2.2.1.2) + 7.2.2.1.3)
{compacted_value, merge_compacted_value(result, property, value)}
else
{Map.put(compacted_value, property, value), result}
end
# 7.2.3)
unless Enum.empty?(compacted_value) do
# 7.2.3.1)
alias = compact_iri("@reverse", active_context, inverse_context, nil, true)
# 7.2.3.2)
Map.put(result, alias, compacted_value)
else
result
end
# 7.3)
expanded_property == "@index" &&
(term_def = active_context.term_defs[active_property]) &&
term_def.container_mapping == "@index" ->
end
# 7.2.3)
unless Enum.empty?(compacted_value) do
# 7.2.3.1)
alias = compact_iri("@reverse", active_context, inverse_context, nil, true)
# 7.2.3.2)
Map.put(result, alias, compacted_value)
else
result
end
# 7.4)
expanded_property in ~w[@index @value @language] ->
# 7.4.1)
alias = compact_iri(expanded_property, active_context, inverse_context, nil, true)
# 7.4.2)
Map.put(result, alias, expanded_value)
# 7.3)
expanded_property == "@index" &&
active_context.term_defs[active_property] &&
active_context.term_defs[active_property].container_mapping == "@index" ->
result
true ->
# 7.5)
# 7.4)
expanded_property in ~w[@index @value @language] ->
# 7.4.1)
alias = compact_iri(expanded_property, active_context, inverse_context, nil, true)
# 7.4.2)
Map.put(result, alias, expanded_value)
true ->
# 7.5)
result =
if expanded_value == [] do
# 7.5.1)
item_active_property =
compact_iri(expanded_property, active_context, inverse_context,
expanded_value, true, inside_reverse)
# 7.5.2)
result = Map.update(result, item_active_property, [], fn
Map.update(result, item_active_property, [], fn
value when not is_list(value) -> [value]
value -> value
end)
else
result
end
# 7.6)
Enum.reduce(expanded_value, result, fn (expanded_item, result) ->
# 7.6.1)
item_active_property =
compact_iri(expanded_property, active_context, inverse_context,
expanded_item, true, inside_reverse)
# 7.6)
Enum.reduce(expanded_value, result, fn (expanded_item, result) ->
# 7.6.1)
item_active_property =
compact_iri(expanded_property, active_context, inverse_context,
expanded_item, true, inside_reverse)
# 7.6.2)
term_def = active_context.term_defs[item_active_property]
container = (term_def && term_def.container_mapping) || nil
# 7.6.2)
term_def = active_context.term_defs[item_active_property]
container = (term_def && term_def.container_mapping) || nil
# 7.6.3)
value = (is_map(expanded_item) && expanded_item["@list"]) || expanded_item
compacted_item =
do_compact(value, active_context, inverse_context,
item_active_property, compact_arrays)
# 7.6.3)
value = (is_map(expanded_item) && expanded_item["@list"]) || expanded_item
compacted_item =
do_compact(value, active_context, inverse_context,
item_active_property, compact_arrays)
# 7.6.4)
# 7.6.4)
compacted_item =
if list?(expanded_item) do
# 7.6.4.1)
unless is_list(compacted_item),
do: compacted_item = [compacted_item]
compacted_item =
unless is_list(compacted_item),
do: [compacted_item], else: compacted_item
# 7.6.4.2)
unless container == "@list" do
# 7.6.4.2.1)
@ -186,41 +203,51 @@ defmodule JSON.LD.Compaction do
compact_iri("@list", active_context, inverse_context, nil, true) =>
compacted_item}
# 7.6.4.2.2)
if Map.has_key?(expanded_item, "@index") do
compacted_item = Map.put(compacted_item,
# TODO: Spec fixme? We're setting vocab to true, as other implementations do, but this is not mentioned in the spec
compact_iri("@index", active_context, inverse_context, nil, true),
expanded_item["@index"])
end
if Map.has_key?(expanded_item, "@index") do
Map.put(compacted_item,
# TODO: Spec fixme? We're setting vocab to true, as other implementations do, but this is not mentioned in the spec
compact_iri("@index", active_context, inverse_context, nil, true),
expanded_item["@index"])
else
compacted_item
end
# 7.6.4.3)
else
if Map.has_key?(result, item_active_property),
do: raise JSON.LD.CompactionToListOfListsError,
if Map.has_key?(result, item_active_property) do
raise JSON.LD.CompactionToListOfListsError,
message: "The compacted document contains a list of lists as multiple lists have been compacted to the same term."
else
compacted_item
end
end
else
compacted_item
end
# 7.6.5)
if container in ~w[@language @index] do
map_object = result[item_active_property] || %{}
# 7.6.5)
if container in ~w[@language @index] do
map_object = result[item_active_property] || %{}
compacted_item =
if container == "@language" and
is_map(compacted_item) and Map.has_key?(compacted_item, "@value"),
do: compacted_item = compacted_item["@value"]
map_key = expanded_item[container]
map_object = merge_compacted_value(map_object, map_key, compacted_item)
Map.put(result, item_active_property, map_object)
do: compacted_item["@value"],
else: compacted_item
map_key = expanded_item[container]
map_object = merge_compacted_value(map_object, map_key, compacted_item)
Map.put(result, item_active_property, map_object)
# 7.6.6)
else
# 7.6.6)
else
compacted_item =
if !is_list(compacted_item) and (!compact_arrays or
container in ~w[@set @list] or expanded_property in ~w[@list @graph]),
do: compacted_item = [compacted_item]
merge_compacted_value(result, item_active_property, compacted_item)
end
end)
end
end)
end
do: [compacted_item],
else: compacted_item
merge_compacted_value(result, item_active_property, compacted_item)
end
end)
end
end)
end
defp merge_compacted_value(map, key, value) do
@ -252,138 +279,161 @@ defmodule JSON.LD.Compaction do
# 2) If vocab is true and iri is a key in inverse context:
term = if vocab && Map.has_key?(inverse_context, iri) do
# 2.1) Initialize default language to active context's default language, if it has one, otherwise to @none.
default_language = active_context.default_language || "@none"
# default_language = active_context.default_language || "@none"
# 2.3) Initialize type/language to @language, and type/language value to @null. These two variables will keep track of the preferred type mapping or language mapping for a term, based on what is compatible with value.
type_language = "@language"
type_language_value = "@null"
# 2.2) Initialize containers to an empty array. This array will be used to keep track of an ordered list of preferred container mappings for a term, based on what is compatible with value.
# 2.4) If value is a JSON object that contains the key @index, then append the value @index to containers.
containers = if index?(value), do: ["@index"], else: []
cond do
# 2.5) If reverse is true, set type/language to @type, type/language value to @reverse, and append @set to containers.
reverse ->
containers = containers ++ ["@set"]
type_language = "@type"
type_language_value = "@reverse"
# 2.6) Otherwise, if value is a list object, then set type/language and type/language value to the most specific values that work for all items in the list as follows:
list?(value) ->
# 2.6.1) If @index is not a key in value, then append @list to containers.
if not index?(value),
do: containers = containers ++ ["@list"]
# 2.6.2) Initialize list to the array associated with the key @list in value.
list = value["@list"]
# 2.6.3) Initialize common type and common language to null. If list is empty, set common language to default language.
{common_type, common_language} = {nil, nil}
if Enum.empty?(list) do
common_language = default_language
else
# 2.6.4) For each item in list:
{common_type, common_language} = Enum.reduce_while list, {common_type, common_language},
fn (item, {common_type, common_language}) ->
# 2.6.4.1) Initialize item language to @none and item type to @none.
{item_type, item_language} = {"@none", "@none"}
# 2.6.4.2) If item contains the key @value:
if Map.has_key?(item, "@value") do
cond do
# 2.6.4.2.1) If item contains the key @language, then set item language to its associated value.
Map.has_key?(item, "@language") ->
item_language = item["@language"]
# 2.6.4.2.2) Otherwise, if item contains the key @type, set item type to its associated value.
Map.has_key?(item, "@type") ->
item_type = item["@type"]
# 2.6.4.2.3) Otherwise, set item language to @null.
true ->
item_language = "@null"
end
# 2.6.4.3) Otherwise, set item type to @id.
else
item_type = "@id"
end
cond do
# 2.6.4.4) If common language is null, set it to item language.
is_nil(common_language) ->
common_language = item_language
# 2.6.4.5) Otherwise, if item language does not equal common language and item contains the key @value, then set common language to @none because list items have conflicting languages.
item_language != common_language and Map.has_key?(item, "@value") ->
common_language = "@none"
true ->
end
cond do
# 2.6.4.6) If common type is null, set it to item type.
is_nil(common_type) ->
common_type = item_type
# 2.6.4.7) Otherwise, if item type does not equal common type, then set common type to @none because list items have conflicting types.
item_type != common_type ->
common_type = "@none"
true ->
end
# 2.6.4.8) If common language is @none and common type is @none, then stop processing items in the list because it has been detected that there is no common language or type amongst the items.
if common_language == "@none" and common_type == "@none" do
{:halt, {common_type, common_language}}
else
{:cont, {common_type, common_language}}
end
end
# 2.6.5) If common language is null, set it to @none.
if is_nil(common_language), do: common_language = "@none"
# 2.6.6) If common type is null, set it to @none.
if is_nil(common_type), do: common_type = "@none"
# 2.6.7) If common type is not @none then set type/language to @type and type/language value to common type.
if common_type != "@none" do
type_language = "@type"
type_language_value = common_type
# 2.6.8) Otherwise, set type/language value to common language.
else
type_language_value = common_language
end
end
# 2.7) Otherwise
true ->
# 2.7.1) If value is a value object:
if is_map(value) and Map.has_key?(value, "@value") do
# 2.7.1.1) If value contains the key @language and does not contain the key @index, then set type/language value to its associated value and append @language to containers.
if Map.has_key?(value, "@language") and not Map.has_key?(value, "@index") do
type_language_value = value["@language"]
containers = containers ++ ["@language"]
else
# 2.7.1.2) Otherwise, if value contains the key @type, then set type/language value to its associated value and set type/language to @type.
if Map.has_key?(value, "@type") do
type_language_value = value["@type"]
type_language = "@type"
end
end
# 2.7.2) Otherwise, set type/language to @type and set type/language value to @id.
else
{containers, type_language, type_language_value} =
cond do
# 2.5) If reverse is true, set type/language to @type, type/language value to @reverse, and append @set to containers.
reverse ->
containers = containers ++ ["@set"]
type_language = "@type"
type_language_value = "@id"
end
# 2.7.3) Append @set to containers.
containers = containers ++ ["@set"]
end
type_language_value = "@reverse"
{containers, type_language, type_language_value}
# 2.6) Otherwise, if value is a list object, then set type/language and type/language value to the most specific values that work for all items in the list as follows:
list?(value) ->
# 2.6.1) If @index is not a key in value, then append @list to containers.
containers =
if not index?(value),
do: containers ++ ["@list"], else: containers
# 2.6.2) Initialize list to the array associated with the key @list in value.
list = value["@list"]
# 2.6.3) Initialize common type and common language to null. If list is empty, set common language to default language.
{common_type, common_language} = {nil, nil}
{type_language, type_language_value} =
if Enum.empty?(list) do
# common_language = default_language
{type_language, type_language_value}
else
# 2.6.4) For each item in list:
{common_type, common_language} = Enum.reduce_while list, {common_type, common_language},
fn (item, {common_type, common_language}) ->
# 2.6.4.1) Initialize item language to @none and item type to @none.
{item_type, item_language} = {"@none", "@none"}
# 2.6.4.2) If item contains the key @value:
{item_type, item_language} =
if Map.has_key?(item, "@value") do
cond do
# 2.6.4.2.1) If item contains the key @language, then set item language to its associated value.
Map.has_key?(item, "@language") ->
{item_type, item["@language"]}
# 2.6.4.2.2) Otherwise, if item contains the key @type, set item type to its associated value.
Map.has_key?(item, "@type") ->
{item["@type"], item_language}
# 2.6.4.2.3) Otherwise, set item language to @null.
true ->
{item_type, "@null"}
end
# 2.6.4.3) Otherwise, set item type to @id.
else
{"@id", item_language}
end
common_language =
cond do
# 2.6.4.4) If common language is null, set it to item language.
is_nil(common_language) ->
item_language
# 2.6.4.5) Otherwise, if item language does not equal common language and item contains the key @value, then set common language to @none because list items have conflicting languages.
item_language != common_language and Map.has_key?(item, "@value") ->
"@none"
true ->
common_language
end
common_type =
cond do
# 2.6.4.6) If common type is null, set it to item type.
is_nil(common_type) ->
item_type
# 2.6.4.7) Otherwise, if item type does not equal common type, then set common type to @none because list items have conflicting types.
item_type != common_type ->
"@none"
true ->
common_type
end
# 2.6.4.8) If common language is @none and common type is @none, then stop processing items in the list because it has been detected that there is no common language or type amongst the items.
if common_language == "@none" and common_type == "@none" do
{:halt, {common_type, common_language}}
else
{:cont, {common_type, common_language}}
end
end
# 2.6.5) If common language is null, set it to @none.
common_language = if is_nil(common_language), do: "@none", else: common_language
# 2.6.6) If common type is null, set it to @none.
common_type = if is_nil(common_type), do: "@none", else: common_type
# 2.6.7) If common type is not @none then set type/language to @type and type/language value to common type.
if common_type != "@none" do
type_language = "@type"
type_language_value = common_type
{type_language, type_language_value}
# 2.6.8) Otherwise, set type/language value to common language.
else
type_language_value = common_language
{type_language, type_language_value}
end
end
{containers, type_language, type_language_value}
# 2.7) Otherwise
true ->
# 2.7.1) If value is a value object:
{containers, type_language, type_language_value} =
if is_map(value) and Map.has_key?(value, "@value") do
# 2.7.1.1) If value contains the key @language and does not contain the key @index, then set type/language value to its associated value and append @language to containers.
if Map.has_key?(value, "@language") and not Map.has_key?(value, "@index") do
type_language_value = value["@language"]
containers = containers ++ ["@language"]
{containers, type_language, type_language_value}
else
# 2.7.1.2) Otherwise, if value contains the key @type, then set type/language value to its associated value and set type/language to @type.
if Map.has_key?(value, "@type") do
type_language_value = value["@type"]
type_language = "@type"
{containers, type_language, type_language_value}
else
{containers, type_language, type_language_value}
end
end
# 2.7.2) Otherwise, set type/language to @type and set type/language value to @id.
else
type_language = "@type"
type_language_value = "@id"
{containers, type_language, type_language_value}
end
# 2.7.3) Append @set to containers.
containers = containers ++ ["@set"]
{containers, type_language, type_language_value}
end
# 2.8) Append @none to containers. This represents the non-existence of a container mapping, and it will be the last container mapping value to be checked as it is the most generic.
containers = containers ++ ["@none"]
# 2.9) If type/language value is null, set it to @null. This is the key under which null values are stored in the inverse context entry.
if is_nil(type_language_value), do: type_language_value = "@null"
type_language_value = if is_nil(type_language_value), do: "@null", else: type_language_value
# 2.10) Initialize preferred values to an empty array. This array will indicate, in order, the preferred values for a term's type mapping or language mapping.
preferred_values = []
# 2.11) If type/language value is @reverse, append @reverse to preferred values.
if type_language_value == "@reverse",
do: preferred_values = preferred_values ++ ["@reverse"]
preferred_values =
if type_language_value == "@reverse",
do: preferred_values ++ ["@reverse"],
else: preferred_values
# 2.12) If type/language value is @id or @reverse and value has an @id member:
if type_language_value in ~w[@id @reverse] and is_map(value) and Map.has_key?(value, "@id") do
# 2.12.1) If the result of using the IRI compaction algorithm, passing active context, inverse context, the value associated with the @id key in value for iri, true for vocab, and true for document relative has a term definition in the active context with an IRI mapping that equals the value associated with the @id key in value, then append @vocab, @id, and @none, in that order, to preferred values.
# TODO: Spec fixme? document_relative is not a specified parameter of compact_iri
compact_id = compact_iri(value["@id"], active_context, inverse_context, nil, true)
if (term_def = active_context.term_defs[compact_id]) && term_def.iri_mapping == value["@id"] do
preferred_values = preferred_values ++ ~w[@vocab @id @none]
# 2.12.2) Otherwise, append @id, @vocab, and @none, in that order, to preferred values.
preferred_values =
if type_language_value in ~w[@id @reverse] and is_map(value) and Map.has_key?(value, "@id") do
# 2.12.1) If the result of using the IRI compaction algorithm, passing active context, inverse context, the value associated with the @id key in value for iri, true for vocab, and true for document relative has a term definition in the active context with an IRI mapping that equals the value associated with the @id key in value, then append @vocab, @id, and @none, in that order, to preferred values.
# TODO: Spec fixme? document_relative is not a specified parameter of compact_iri
compact_id = compact_iri(value["@id"], active_context, inverse_context, nil, true)
if (term_def = active_context.term_defs[compact_id]) && term_def.iri_mapping == value["@id"] do
preferred_values ++ ~w[@vocab @id @none]
# 2.12.2) Otherwise, append @id, @vocab, and @none, in that order, to preferred values.
else
preferred_values ++ ~w[@id @vocab @none]
end
# 2.13) Otherwise, append type/language value and @none, in that order, to preferred values.
else
preferred_values = preferred_values ++ ~w[@id @vocab @none]
preferred_values ++ [type_language_value, "@none"]
end
# 2.13) Otherwise, append type/language value and @none, in that order, to preferred values.
else
preferred_values = preferred_values ++ [type_language_value, "@none"]
end
# 2.14) Initialize term to the result of the Term Selection algorithm, passing inverse context, iri, containers, type/language, and preferred values.
select_term(inverse_context, iri, containers, type_language, preferred_values)
end
@ -393,50 +443,56 @@ defmodule JSON.LD.Compaction do
term
# 3) At this point, there is no simple term that iri can be compacted to. If vocab is true and active context has a vocabulary mapping:
# 3.1) If iri begins with the vocabulary mapping's value but is longer, then initialize suffix to the substring of iri that does not match. If suffix does not have a term definition in active context, then return suffix.
vocab && active_context.vocab &&
String.starts_with?(iri, active_context.vocab) &&
(suffix = String.replace_prefix(iri, active_context.vocab, "")) != "" &&
is_nil(active_context.term_defs[suffix]) ->
suffix
true ->
# 4) The iri could not be compacted using the active context's vocabulary mapping. Try to create a compact IRI, starting by initializing compact IRI to null. This variable will be used to store the created compact IRI, if any.
compact_iri =
# 5) For each key term and value term definition in the active context:
Enum.reduce(active_context.term_defs, nil, fn ({term, term_def}, compact_iri) ->
cond do
# 5.1) If the term contains a colon (:), then continue to the next term because terms with colons can't be used as prefixes.
String.contains?(term, ":") ->
compact_iri
# 5.2) If the term definition is null, its IRI mapping equals iri, or its IRI mapping is not a substring at the beginning of iri, the term cannot be used as a prefix because it is not a partial match with iri. Continue with the next term.
is_nil(term_def) || term_def.iri_mapping == iri ||
not String.starts_with?(iri, term_def.iri_mapping) ->
compact_iri
true ->
# 5.3) Initialize candidate by concatenating term, a colon (:), and the substring of iri that follows after the value of the term definition's IRI mapping.
candidate = term <> ":" <> (String.split_at(iri, String.length(term_def.iri_mapping)) |> elem(1))
# 5.4) If either compact IRI is null or candidate is shorter or the same length but lexicographically less than compact IRI and candidate does not have a term definition in active context or if the term definition has an IRI mapping that equals iri and value is null, set compact IRI to candidate.
# TODO: Spec fixme: The specified expression is pretty ambiguous without brackets ...
# TODO: Spec fixme: "if the term definition has an IRI mapping that equals iri" is already catched in 5.2, so will never happen here ...
if (is_nil(compact_iri) or shortest_or_least?(candidate, compact_iri)) and
(is_nil(active_context.term_defs[candidate]) or
(active_context.term_defs[candidate].iri_mapping == iri and is_nil(value))) do
candidate
else
compact_iri
end
end
end)
cond do
# 6) If compact IRI is not null, return compact IRI.
not is_nil(compact_iri) ->
compact_iri
# 7) If vocab is false then transform iri to a relative IRI using the document's base IRI.
not vocab ->
remove_base(iri, Context.base(active_context))
# 8) Finally, return iri as is.
true ->
iri
vocab && active_context.vocab && String.starts_with?(iri, active_context.vocab) ->
suffix = String.replace_prefix(iri, active_context.vocab, "")
if suffix != "" && is_nil(active_context.term_defs[suffix]) do
String.replace_prefix(iri, active_context.vocab, "")
else
create_compact_iri(iri, active_context, value, vocab)
end
true ->
create_compact_iri(iri, active_context, value, vocab)
end
end
defp create_compact_iri(iri, active_context, value, vocab) do
# 4) The iri could not be compacted using the active context's vocabulary mapping. Try to create a compact IRI, starting by initializing compact IRI to null. This variable will be used to store the created compact IRI, if any.
compact_iri =
# 5) For each key term and value term definition in the active context:
Enum.reduce(active_context.term_defs, nil, fn ({term, term_def}, compact_iri) ->
cond do
# 5.1) If the term contains a colon (:), then continue to the next term because terms with colons can't be used as prefixes.
String.contains?(term, ":") ->
compact_iri
# 5.2) If the term definition is null, its IRI mapping equals iri, or its IRI mapping is not a substring at the beginning of iri, the term cannot be used as a prefix because it is not a partial match with iri. Continue with the next term.
is_nil(term_def) || term_def.iri_mapping == iri ||
not String.starts_with?(iri, term_def.iri_mapping) ->
compact_iri
true ->
# 5.3) Initialize candidate by concatenating term, a colon (:), and the substring of iri that follows after the value of the term definition's IRI mapping.
candidate = term <> ":" <> (String.split_at(iri, String.length(term_def.iri_mapping)) |> elem(1))
# 5.4) If either compact IRI is null or candidate is shorter or the same length but lexicographically less than compact IRI and candidate does not have a term definition in active context or if the term definition has an IRI mapping that equals iri and value is null, set compact IRI to candidate.
# TODO: Spec fixme: The specified expression is pretty ambiguous without brackets ...
# TODO: Spec fixme: "if the term definition has an IRI mapping that equals iri" is already catched in 5.2, so will never happen here ...
if (is_nil(compact_iri) or shortest_or_least?(candidate, compact_iri)) and
(is_nil(active_context.term_defs[candidate]) or
(active_context.term_defs[candidate].iri_mapping == iri and is_nil(value))) do
candidate
else
compact_iri
end
end
end)
cond do
# 6) If compact IRI is not null, return compact IRI.
not is_nil(compact_iri) ->
compact_iri
# 7) If vocab is false then transform iri to a relative IRI using the document's base IRI.
not vocab ->
remove_base(iri, Context.base(active_context))
# 8) Finally, return iri as is.
true ->
iri
end
end
@ -526,8 +582,9 @@ defmodule JSON.LD.Compaction do
value["@value"]
true ->
# 7) Otherwise, if number members equals 1 and either the value of the @value member is not a string, or the active context has no default language, or the language mapping of active property is set to null, return the value associated with the @value member.
value_value = value["@value"]
if number_members == 1 and
(not is_binary(value_value = value["@value"]) or
(not is_binary(value_value) or
!active_context.default_language or
# TODO: Spec fixme: doesn't specify to check default language as well
Context.language(active_context, active_property) == nil) do

@ -40,7 +40,7 @@ defmodule JSON.LD.Context do
# 3.1) If context is null, set result to a newly-initialized active context and continue with the next context. The base IRI of the active context is set to the IRI of the currently being processed document (which might be different from the currently being processed context), if available; otherwise to null. If set, the base option of a JSON-LD API Implementation overrides the base IRI.
defp do_update(%JSON.LD.Context{}, nil, remote, options) do
defp do_update(%JSON.LD.Context{}, nil, _remote, options) do
new(options)
end
@ -165,12 +165,16 @@ defmodule JSON.LD.Context do
do_create_type_definition(definition, active, local, value, defined)
{done, definition, active, defined} =
do_create_reverse_definition(definition, active, local, value, defined)
unless done do
{definition, active, defined} =
do_create_id_definition(definition, active, local, term, value, defined)
definition = do_create_container_definition(definition, value)
definition = do_create_language_definition(definition, value)
end
{definition, active, defined} =
unless done do
{definition, active, defined} =
do_create_id_definition(definition, active, local, term, value, defined)
definition = do_create_container_definition(definition, value)
definition = do_create_language_definition(definition, value)
{definition, active, defined}
else
{definition, active, defined}
end
# 18 / 11.6) Set the term definition of term in active context to definition and set the value associated with defined's key term to true.
{%JSON.LD.Context{active | term_defs: Map.put(active.term_defs, term, definition)},
Map.put(defined, term, true)}
@ -215,20 +219,23 @@ defmodule JSON.LD.Context do
true -> # 11.3)
{expanded_reverse, active, defined} =
expand_iri(reverse, active, false, true, local, defined)
if IRI.absolute?(expanded_reverse) or blank_node_id?(expanded_reverse) do
definition = %TermDefinition{definition | iri_mapping: expanded_reverse}
else
raise JSON.LD.InvalidIRIMappingError,
message: "Non-absolute @reverse IRI: #{inspect reverse}"
end
case Map.get(value, "@container", {false}) do # 11.4)
{false} -> nil
container when is_nil(container) or container in ~w[@set @index] ->
definition = %TermDefinition{definition | container_mapping: container}
_ ->
raise JSON.LD.InvalidReversePropertyError,
message: "#{inspect reverse} is not a valid reverse property; reverse properties only support set- and index-containers"
end
definition =
if IRI.absolute?(expanded_reverse) or blank_node_id?(expanded_reverse) do
%TermDefinition{definition | iri_mapping: expanded_reverse}
else
raise JSON.LD.InvalidIRIMappingError,
message: "Non-absolute @reverse IRI: #{inspect reverse}"
end
definition =
case Map.get(value, "@container", {false}) do # 11.4)
{false} ->
definition
container when is_nil(container) or container in ~w[@set @index] ->
%TermDefinition{definition | container_mapping: container}
_ ->
raise JSON.LD.InvalidReversePropertyError,
message: "#{inspect reverse} is not a valid reverse property; reverse properties only support set- and index-containers"
end
# 11.5) & 11.6)
{true, %TermDefinition{definition | reverse_property: true}, active, defined}
end
@ -270,9 +277,13 @@ defmodule JSON.LD.Context do
if String.contains?(term, ":") do
case compact_iri_parts(term) do
[prefix, suffix] ->
if prefix_mapping = local[prefix] do
{active, defined} = do_create_term_definition(active, local, prefix, prefix_mapping, defined)
end
prefix_mapping = local[prefix]
{active, defined} =
if prefix_mapping do
do_create_term_definition(active, local, prefix, prefix_mapping, defined)
else
{active, defined}
end
if prefix_def = active.term_defs[prefix] do
{%TermDefinition{definition | iri_mapping: prefix_def.iri_mapping <> suffix}, active, defined}
else
@ -343,25 +354,27 @@ defmodule JSON.LD.Context do
type_map = get_in(result, [iri, container, "@type"]) || %{}
language_map = get_in(result, [iri, container, "@language"]) || %{}
case term_def do
# 3.8) If the term definition indicates that the term represents a reverse property
%TermDefinition{reverse_property: true} ->
type_map = Map.put_new(type_map, "@reverse", term)
# 3.9) Otherwise, if term definition has a type mapping
%TermDefinition{type_mapping: type_mapping}
when type_mapping != false ->
type_map = Map.put_new(type_map, type_mapping, term)
# 3.10) Otherwise, if term definition has a language mapping (might be null)
%TermDefinition{language_mapping: language_mapping}
when language_mapping != false ->
language = language_mapping || "@null"
language_map = Map.put_new(language_map, language, term)
# 3.11) Otherwise
_ ->
language_map = Map.put_new(language_map, default_language, term)
language_map = Map.put_new(language_map, "@none", term)
type_map = Map.put_new(type_map, "@none", term)
end
{type_map, language_map} =
case term_def do
# 3.8) If the term definition indicates that the term represents a reverse property
%TermDefinition{reverse_property: true} ->
{Map.put_new(type_map, "@reverse", term), language_map}
# 3.9) Otherwise, if term definition has a type mapping
%TermDefinition{type_mapping: type_mapping}
when type_mapping != false ->
{Map.put_new(type_map, type_mapping, term), language_map}
# 3.10) Otherwise, if term definition has a language mapping (might be null)
%TermDefinition{language_mapping: language_mapping}
when language_mapping != false ->
language = language_mapping || "@null"
{type_map, Map.put_new(language_map, language, term)}
# 3.11) Otherwise
_ ->
language_map = Map.put_new(language_map, default_language, term)
language_map = Map.put_new(language_map, "@none", term)
type_map = Map.put_new(type_map, "@none", term)
{type_map, language_map}
end
result
|> Map.put_new(iri, %{})

@ -37,8 +37,8 @@ defmodule JSON.LD.Decoder do
|> Enum.reduce(rdf_graph, fn ({property, values}, rdf_graph) ->
cond do
property == "@type" ->
Graph.add rdf_graph,
node_to_rdf(subject), RDF.NS.RDF.type,
Graph.add rdf_graph,
node_to_rdf(subject), RDF.NS.RDF.type,
Enum.map(values, &node_to_rdf/1)
JSON.LD.keyword?(property) ->
rdf_graph
@ -50,7 +50,7 @@ defmodule JSON.LD.Decoder do
true ->
Enum.reduce values, rdf_graph, fn
(%{"@list" => list}, rdf_graph) ->
with {list_triples, first} <-
with {list_triples, first} <-
list_to_rdf(list, node_id_map) do
rdf_graph
|> Graph.add({node_to_rdf(subject), node_to_rdf(property), first})
@ -115,31 +115,37 @@ defmodule JSON.LD.Decoder do
defp object_to_rdf(%{"@value" => value} = item) do
datatype = item["@type"]
cond do
is_boolean(value) ->
value = value |> RDF.Boolean.new |> RDF.Literal.canonical |> RDF.Literal.lexical
datatype = if is_nil(datatype), do: XSD.boolean, else: datatype
is_float(value) or (is_number(value) and datatype == to_string(XSD.double)) ->
value = value |> RDF.Double.new |> RDF.Literal.canonical |> RDF.Literal.lexical
datatype = if is_nil(datatype), do: XSD.double, else: datatype
is_integer(value) or (is_number(value) and datatype == to_string(XSD.integer)) ->
value = value |> RDF.Integer.new |> RDF.Literal.canonical |> RDF.Literal.lexical
datatype = if is_nil(datatype), do: XSD.integer, else: datatype
is_nil(datatype) ->
datatype =
if Map.has_key?(item, "@language") do
RDF.langString
else
XSD.string
end
true ->
end
{value, datatype} =
cond do
is_boolean(value) ->
value = value |> RDF.Boolean.new |> RDF.Literal.canonical |> RDF.Literal.lexical
datatype = if is_nil(datatype), do: XSD.boolean, else: datatype
{value, datatype}
is_float(value) or (is_number(value) and datatype == to_string(XSD.double)) ->
value = value |> RDF.Double.new |> RDF.Literal.canonical |> RDF.Literal.lexical
datatype = if is_nil(datatype), do: XSD.double, else: datatype
{value, datatype}
is_integer(value) or (is_number(value) and datatype == to_string(XSD.integer)) ->
value = value |> RDF.Integer.new |> RDF.Literal.canonical |> RDF.Literal.lexical
datatype = if is_nil(datatype), do: XSD.integer, else: datatype
{value, datatype}
is_nil(datatype) ->
datatype =
if Map.has_key?(item, "@language") do
RDF.langString
else
XSD.string
end
{value, datatype}
true ->
{value, datatype}
end
RDF.Literal.new(value,
%{datatype: datatype, language: item["@language"], canonicalize: true})
end
defp list_to_rdf(list, node_id_map) do
{list_triples, first, last} =
{list_triples, first, last} =
list
|> Enum.reduce({[], nil, nil}, fn (item, {list_triples, first, last}) ->
case object_to_rdf(item) do
@ -148,7 +154,7 @@ defmodule JSON.LD.Decoder do
with bnode = node_to_rdf(generate_blank_node_id(node_id_map)) do
if last do
{
list_triples ++
list_triples ++
[{last, RDF.NS.RDF.rest, bnode},
{bnode, RDF.NS.RDF.first, object}],
first,

@ -4,9 +4,7 @@ defmodule JSON.LD.Encoder do
use RDF.Serialization.Encoder
import JSON.LD.Utils
alias RDF.{Dataset, Graph, IRI, BlankNode, Literal}
alias RDF.{IRI, BlankNode, Literal}
alias RDF.NS.{XSD}
@rdf_type to_string(RDF.NS.RDF.type)
@ -71,12 +69,12 @@ defmodule JSON.LD.Encoder do
|> Enum.sort_by(fn {subject, _} -> subject end)
|> Enum.reduce([], fn ({subject, node}, result) ->
# 6.1)
node =
node =
if Map.has_key?(graph_map, subject) do
Map.put node, "@graph",
graph_map[subject]
|> Enum.sort_by(fn {s, _} -> s end)
|> Enum.reduce([], fn ({s, n}, graph_nodes) ->
|> Enum.reduce([], fn ({_s, n}, graph_nodes) ->
n = Map.delete(n, "usages")
if Map.size(n) == 1 and Map.has_key?(n, "@id") do
graph_nodes
@ -107,19 +105,24 @@ defmodule JSON.LD.Encoder do
{subject, predicate, node_object} =
{to_string(subject), to_string(predicate), nil}
node = Map.get(node_map, subject, %{"@id" => subject})
if is_node_object = (match?(%IRI{}, object) || match?(%BlankNode{}, object)) do
node_object = to_string(object)
node_map = Map.put_new(node_map, node_object, %{"@id" => node_object})
end
node =
{node_object, node_map} =
if is_node_object = (match?(%IRI{}, object) || match?(%BlankNode{}, object)) do
node_object = to_string(object)
node_map = Map.put_new(node_map, node_object, %{"@id" => node_object})
{node_object, node_map}
else
{node_object, node_map}
end
{node, node_map} =
if is_node_object and !use_rdf_type and predicate == @rdf_type do
Map.update(node, "@type", [node_object], fn types ->
node = Map.update(node, "@type", [node_object], fn types ->
if node_object in types do
types
else
types ++ [node_object]
end
end)
{node, node_map}
else
value = rdf_to_object(object, use_native_types)
node =
@ -130,20 +133,22 @@ defmodule JSON.LD.Encoder do
objects ++ [value]
end
end)
if is_node_object do
usage = %{
"node" => node,
"property" => predicate,
"value" => value,
}
node_map =
node_map =
if is_node_object do
usage = %{
"node" => node,
"property" => predicate,
"value" => value,
}
Map.update(node_map, node_object, %{"usages" => [usage]}, fn object_node ->
Map.update(object_node, "usages", [usage], fn usages ->
usages ++ [usage]
end)
end)
end
node
else
node_map
end
{node, node_map}
end
Map.put(node_map, subject, node)
end)
@ -154,7 +159,7 @@ defmodule JSON.LD.Encoder do
# node member of the usage maps with later enhanced usages
defp update_node_usages(node_map) do
Enum.reduce node_map, node_map, fn
({subject, %{"usages" => usages} = node}, node_map) ->
({subject, %{"usages" => _usages} = _node}, node_map) ->
update_in node_map, [subject, "usages"], fn usages ->
Enum.map usages, fn usage ->
Map.update! usage, "node", fn %{"@id" => subject} ->
@ -187,11 +192,11 @@ defmodule JSON.LD.Encoder do
extract_list(usage)
# 4.3.4)
skip =
{skip, list, list_nodes, head_path, head} =
if property == @rdf_first do
# 4.3.4.1)
if subject == @rdf_nil do
true
{true, list, list_nodes, head_path, head}
else
# 4.3.4.3-5)
head_path = [head["@id"], @rdf_rest]
@ -199,10 +204,10 @@ defmodule JSON.LD.Encoder do
# 4.3.4.6)
[_ | list] = list
[_ | list_nodes] = list_nodes
false
{false, list, list_nodes, head_path, head}
end
else
false
{false, list, list_nodes, head_path, head}
end
if skip do
graph_object
@ -272,33 +277,36 @@ defmodule JSON.LD.Encoder do
result = %{}
converted_value = literal
type = nil
if use_native_types do
cond do
datatype == XSD.string ->
converted_value = value
datatype == XSD.boolean ->
if RDF.Boolean.valid?(literal) do
converted_value = value
else
type = XSD.boolean
end
datatype in [XSD.integer, XSD.double] ->
if RDF.Literal.valid?(literal) do
converted_value = value
end
true ->
type = datatype
{converted_value, type, result} =
if use_native_types do
cond do
datatype == XSD.string ->
{value, type, result}
datatype == XSD.boolean ->
if RDF.Boolean.valid?(literal) do
{value, type, result}
else
{converted_value, XSD.boolean, result}
end
datatype in [XSD.integer, XSD.double] ->
if RDF.Literal.valid?(literal) do
{value, type, result}
else
{converted_value, type, result}
end
true ->
{converted_value, datatype, result}
end
else
cond do
datatype == RDF.langString ->
{converted_value, type, Map.put(result, "@language", literal.language)}
datatype == XSD.string ->
{converted_value, type, result}
true ->
{converted_value, datatype, result}
end
end
else
cond do
datatype == RDF.langString ->
result = Map.put(result, "@language", literal.language)
datatype == XSD.string ->
nil # no-op
true ->
type = datatype
end
end
result = type && Map.put(result, "@type", to_string(type)) || result
Map.put(result, "@value",

@ -36,7 +36,7 @@ defmodule JSON.LD.Expansion do
defp do_expand(_, _, nil, _), do: nil
# 2) If element is a scalar, ...
defp do_expand(active_context, active_property, element, options)
defp do_expand(active_context, active_property, element, _options)
when is_binary(element) or is_number(element) or is_boolean(element) do
if active_property in [nil, "@graph"] do
nil
@ -71,216 +71,229 @@ defmodule JSON.LD.Expansion do
defp do_expand(active_context, active_property, element, options)
when is_map(element) do
# 5)
if Map.has_key?(element, "@context") do
active_context = JSON.LD.Context.update(active_context, Map.get(element, "@context"), [], options)
end
active_context =
if Map.has_key?(element, "@context") do
JSON.LD.Context.update(active_context, Map.get(element, "@context"), [], options)
else
active_context
end
# 6) and 7)
result = element
|> Enum.sort_by(fn {key, _} -> key end)
|> Enum.reduce(%{}, fn ({key, value}, result) ->
if (key != "@context") && # 7.1)
(expanded_property = expand_iri(key, active_context, false, true)) && # 7.2)
if key != "@context" do # 7.1)
expanded_property = expand_iri(key, active_context, false, true)
if expanded_property && # 7.2)
(String.contains?(expanded_property, ":") || JSON.LD.keyword?(expanded_property)) do # 7.3)
if JSON.LD.keyword?(expanded_property) do # 7.4)
if active_property == "@reverse", # 7.4.1)
do: raise JSON.LD.InvalidReversePropertyMapError,
message: "An invalid reverse property map has been detected. No keywords apart from @context are allowed in reverse property maps."
if Map.has_key?(result, expanded_property), # 7.4.2)
do: raise JSON.LD.CollidingKeywordsError,
message: "Two properties which expand to the same keyword have been detected. This might occur if a keyword and an alias thereof are used at the same time."
if JSON.LD.keyword?(expanded_property) do # 7.4)
if active_property == "@reverse", # 7.4.1)
do: raise JSON.LD.InvalidReversePropertyMapError,
message: "An invalid reverse property map has been detected. No keywords apart from @context are allowed in reverse property maps."
if Map.has_key?(result, expanded_property), # 7.4.2)
do: raise JSON.LD.CollidingKeywordsError,
message: "Two properties which expand to the same keyword have been detected. This might occur if a keyword and an alias thereof are used at the same time."
expanded_value = case expanded_property do
"@id" -> # 7.4.3)
if is_binary(value) do
expand_iri(value, active_context, true)
else
raise JSON.LD.InvalidIdValueError,
message: "#{inspect value} is not a valid @id value"
end
"@type" -> # 7.4.4)
cond do
is_binary(value) ->
expand_iri(value, active_context, true, true)
is_list(value) and Enum.all?(value, &is_binary/1) ->
Enum.map value, fn item ->
expand_iri(item, active_context, true, true) end
true ->
raise JSON.LD.InvalidTypeValueError,
message: "#{inspect value} is not a valid @type value"
end
"@graph" -> # 7.4.5)
do_expand(active_context, "@graph", value, options)
"@value" -> # 7.4.6)
if scalar?(value) or is_nil(value) do
if is_nil(value) do
{:skip, Map.put(result, "@value", nil)}
expanded_value = case expanded_property do
"@id" -> # 7.4.3)
if is_binary(value) do
expand_iri(value, active_context, true)
else
raise JSON.LD.InvalidIdValueError,
message: "#{inspect value} is not a valid @id value"
end
"@type" -> # 7.4.4)
cond do
is_binary(value) ->
expand_iri(value, active_context, true, true)
is_list(value) and Enum.all?(value, &is_binary/1) ->
Enum.map value, fn item ->
expand_iri(item, active_context, true, true) end
true ->
raise JSON.LD.InvalidTypeValueError,
message: "#{inspect value} is not a valid @type value"
end
"@graph" -> # 7.4.5)
do_expand(active_context, "@graph", value, options)
"@value" -> # 7.4.6)
if scalar?(value) or is_nil(value) do
if is_nil(value) do
{:skip, Map.put(result, "@value", nil)}
else
value
end
else
raise JSON.LD.InvalidValueObjectValueError,
message: "#{inspect value} is not a valid value for the @value member of a value object; neither a scalar nor null"
end
"@language" -> # 7.4.7)
if is_binary(value),
do: String.downcase(value),
else: raise JSON.LD.InvalidLanguageTaggedStringError,
message: "#{inspect value} is not a valid language-tag"
"@index" -> # 7.4.8)
if is_binary(value),
do: value,
else: raise JSON.LD.InvalidIndexValueError,
message: "#{inspect value} is not a valid @index value"
"@list" -> # 7.4.9)
if active_property in [nil, "@graph"] do # 7.4.9.1)
{:skip, result}
else
value = do_expand(active_context, active_property, value, options)
# Spec FIXME: need to be sure that result is a list [from RDF.rb implementation]
value = if is_list(value),
do: value,
else: [value]
# If expanded value is a list object, a list of lists error has been detected and processing is aborted.
# Spec FIXME: Also look at each object if result is a list [from RDF.rb implementation]
if Enum.any?(value, fn v -> Map.has_key?(v, "@list") end),
do: raise JSON.LD.ListOfListsError,
message: "List of lists in #{inspect value}"
value
end
else
raise JSON.LD.InvalidValueObjectValueError,
message: "#{inspect value} is not a valid value for the @value member of a value object; neither a scalar nor null"
end
"@language" -> # 7.4.7)
if is_binary(value),
do: String.downcase(value),
else: raise JSON.LD.InvalidLanguageTaggedStringError,
message: "#{inspect value} is not a valid language-tag"
"@index" -> # 7.4.8)
if is_binary(value),
do: value,
else: raise JSON.LD.InvalidIndexValueError,
message: "#{inspect value} is not a valid @index value"
"@list" -> # 7.4.9)
if active_property in [nil, "@graph"] do # 7.4.9.1)
{:skip, result}
else
value = do_expand(active_context, active_property, value, options)
# Spec FIXME: need to be sure that result is a list [from RDF.rb implementation]
value = if is_list(value),
do: value,
else: [value]
# If expanded value is a list object, a list of lists error has been detected and processing is aborted.
# Spec FIXME: Also look at each object if result is a list [from RDF.rb implementation]
if Enum.any?(value, fn v -> Map.has_key?(v, "@list") end),
do: raise JSON.LD.ListOfListsError,
message: "List of lists in #{inspect value}"
value
end
"@set" -> # 7.4.10)
do_expand(active_context, active_property, value, options)
"@reverse" -> # 7.4.11)
unless is_map(value),
do: raise JSON.LD.InvalidReverseValueError,
message: "#{inspect value} is not a valid @reverse value"
expanded_value = do_expand(active_context, "@reverse", value, options) # 7.4.11.1)
new_result =
if Map.has_key?(expanded_value, "@reverse") do # 7.4.11.2) If expanded value contains an @reverse member, i.e., properties that are reversed twice, execute for each of its property and item the following steps:
Enum.reduce expanded_value["@reverse"], result,
fn ({property, item}, new_result) ->
items = if is_list(item),
do: item,
else: [item]
Map.update(new_result, property, items, fn members ->
members ++ items
end)
end
else
result
end
if Map.keys(expanded_value) != ["@reverse"] do # 7.4.11.3)
reverse_map =
Enum.reduce expanded_value, Map.get(new_result, "@reverse", %{}), fn
({property, items}, reverse_map) when property != "@reverse" ->
Enum.each(items, fn item ->
if Map.has_key?(item, "@value") or Map.has_key?(item, "@list"),
do: raise JSON.LD.InvalidReversePropertyValueError,
message: "invalid value for a reverse property in #{inspect item}"
end)
Map.update(reverse_map, property, items, fn members ->
members ++ items
end)
(_, reverse_map) -> reverse_map
end
new_result = Map.put(new_result, "@reverse", reverse_map)
end
{:skip, new_result}
_ ->
nil
end
# 7.4.12)
case expanded_value do
nil ->
result
{:skip, new_result} ->
new_result
expanded_value ->
Map.put(result, expanded_property, expanded_value)
end
else # expanded_property is not a keyword
term_def = active_context.term_defs[key]
expanded_value = cond do
# 7.5) Otherwise, if key's container mapping in active context is @language and value is a JSON object then value is expanded from a language map as follows:
is_map(value) && term_def && term_def.container_mapping == "@language" ->
value
|> Enum.sort_by(fn {language, _} -> language end)
|> Enum.reduce([], fn ({language, language_value}, language_map_result) ->
language_map_result ++ (
if(is_list(language_value),
do: language_value,
else: [language_value])
|> Enum.map(fn
item when is_binary(item) ->
%{
"@value" => item,
"@language" => String.downcase(language)
}
item ->
raise JSON.LD.InvalidLanguageMapValueError,
message: "#{inspect item} is not a valid language map value"
"@set" -> # 7.4.10)
do_expand(active_context, active_property, value, options)
"@reverse" -> # 7.4.11)
unless is_map(value),
do: raise JSON.LD.InvalidReverseValueError,
message: "#{inspect value} is not a valid @reverse value"
expanded_value = do_expand(active_context, "@reverse", value, options) # 7.4.11.1)
new_result =
if Map.has_key?(expanded_value, "@reverse") do # 7.4.11.2) If expanded value contains an @reverse member, i.e., properties that are reversed twice, execute for each of its property and item the following steps:
Enum.reduce expanded_value["@reverse"], result,
fn ({property, item}, new_result) ->
items = if is_list(item),
do: item,
else: [item]
Map.update(new_result, property, items, fn members ->
members ++ items
end)
)
end)
# 7.6)
is_map(value) && term_def && term_def.container_mapping == "@index" ->
value
|> Enum.sort_by(fn {index, _} -> index end)
|> Enum.reduce([], fn ({index, index_value}, index_map_result) ->
index_map_result ++ (
index_value = if(is_list(index_value),
do: index_value,
else: [index_value])
index_value = do_expand(active_context, key, index_value, options)
Enum.map(index_value, fn item ->
Map.put_new(item, "@index", index)
end)
)
end)
# 7.7)
true ->
do_expand(active_context, key, value, options)
end
# 7.8)
if is_nil(expanded_value) do
result
else
# 7.9)
if (term_def && term_def.container_mapping == "@list") &&
!(is_map(expanded_value) && Map.has_key?(expanded_value, "@list")) do
expanded_value = %{"@list" =>
(if is_list(expanded_value),
do: expanded_value,
else: [expanded_value])}
end
else
result
end
new_result =
if Map.keys(expanded_value) != ["@reverse"] do # 7.4.11.3)
reverse_map =
Enum.reduce expanded_value, Map.get(new_result, "@reverse", %{}), fn
({property, items}, reverse_map) when property != "@reverse" ->
Enum.each(items, fn item ->
if Map.has_key?(item, "@value") or Map.has_key?(item, "@list"),
do: raise JSON.LD.InvalidReversePropertyValueError,
message: "invalid value for a reverse property in #{inspect item}"
end)
Map.update(reverse_map, property, items, fn members ->
members ++ items
end)
(_, reverse_map) -> reverse_map
end
Map.put(new_result, "@reverse", reverse_map)
else
new_result
end
{:skip, new_result}
_ ->
nil
end
# 7.10) Otherwise, if the term definition associated to key indicates that it is a reverse property
# Spec FIXME: this is not an otherwise [from RDF.rb implementation]
if term_def && term_def.reverse_property do
reverse_map = Map.get(result, "@reverse", %{})
reverse_map =
if(is_list(expanded_value),
do: expanded_value,
else: [expanded_value])
|> Enum.reduce(reverse_map, fn (item, reverse_map) ->
if Map.has_key?(item, "@value") or Map.has_key?(item, "@list"),
do: raise JSON.LD.InvalidReversePropertyValueError,
message: "invalid value for a reverse property in #{inspect item}"
Map.update reverse_map, expanded_property, [item], fn members ->
members ++ [item]
end
# 7.4.12)
case expanded_value do
nil ->
result
{:skip, new_result} ->
new_result
expanded_value ->
Map.put(result, expanded_property, expanded_value)
end
else # expanded_property is not a keyword
term_def = active_context.term_defs[key]
expanded_value = cond do
# 7.5) Otherwise, if key's container mapping in active context is @language and value is a JSON object then value is expanded from a language map as follows:
is_map(value) && term_def && term_def.container_mapping == "@language" ->
value
|> Enum.sort_by(fn {language, _} -> language end)
|> Enum.reduce([], fn ({language, language_value}, language_map_result) ->
language_map_result ++ (
if(is_list(language_value),
do: language_value,
else: [language_value])
|> Enum.map(fn
item when is_binary(item) ->
%{
"@value" => item,
"@language" => String.downcase(language)
}
item ->
raise JSON.LD.InvalidLanguageMapValueError,
message: "#{inspect item} is not a valid language map value"
end)
)
end)
Map.put(result, "@reverse", reverse_map)
else # 7.11)
expanded_value = if is_list(expanded_value),
do: expanded_value,
else: [expanded_value]
Map.update result, expanded_property, expanded_value,
fn values -> expanded_value ++ values end
# 7.6)
is_map(value) && term_def && term_def.container_mapping == "@index" ->
value
|> Enum.sort_by(fn {index, _} -> index end)
|> Enum.reduce([], fn ({index, index_value}, index_map_result) ->
index_map_result ++ (
index_value = if(is_list(index_value),
do: index_value,
else: [index_value])
index_value = do_expand(active_context, key, index_value, options)
Enum.map(index_value, fn item ->
Map.put_new(item, "@index", index)
end)
)
end)
# 7.7)
true ->
do_expand(active_context, key, value, options)
end
# 7.8)
if is_nil(expanded_value) do
result
else
# 7.9)
expanded_value =
if (term_def && term_def.container_mapping == "@list") &&
!(is_map(expanded_value) && Map.has_key?(expanded_value, "@list")) do
%{"@list" =>
(if is_list(expanded_value),
do: expanded_value,
else: [expanded_value])}
else
expanded_value
end
# 7.10) Otherwise, if the term definition associated to key indicates that it is a reverse property
# Spec FIXME: this is not an otherwise [from RDF.rb implementation]
if term_def && term_def.reverse_property do
reverse_map = Map.get(result, "@reverse", %{})
reverse_map =
if(is_list(expanded_value),
do: expanded_value,
else: [expanded_value])
|> Enum.reduce(reverse_map, fn (item, reverse_map) ->
if Map.has_key?(item, "@value") or Map.has_key?(item, "@list"),
do: raise JSON.LD.InvalidReversePropertyValueError,
message: "invalid value for a reverse property in #{inspect item}"
Map.update reverse_map, expanded_property, [item], fn members ->
members ++ [item]
end
end)
Map.put(result, "@reverse", reverse_map)
else # 7.11)
expanded_value = if is_list(expanded_value),
do: expanded_value,
else: [expanded_value]
Map.update result, expanded_property, expanded_value,
fn values -> expanded_value ++ values end
end
end
end
else
result
end
else
result
@ -321,16 +334,16 @@ defmodule JSON.LD.Expansion do
end
# 11) If result contains only the key @language, set result to null.
if is_map(result) and map_size(result) == 1 and Map.has_key?(result, "@language"),
do: result = nil
result = if is_map(result) and map_size(result) == 1 and Map.has_key?(result, "@language"),
do: nil, else: result
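# Note: an assignment inside `if` does not escape the block in Elixir,
# so the conditional's value is bound to `result` explicitly here.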
# 12) If active property is null or @graph, drop free-floating values as follows:
# Spec FIXME: Due to case 10) we might land with a list here; other implementations deal with that by just returning in step 10)
if is_map(result) and active_property in [nil, "@graph"] and (
result = if is_map(result) and active_property in [nil, "@graph"] and (
Enum.empty?(result) or
Map.has_key?(result, "@value") or Map.has_key?(result, "@list") or
(map_size(result) == 1 and Map.has_key?(result, "@id"))),
do: result = nil
do: nil, else: result
result
end

View file

@@ -92,26 +92,26 @@ defmodule JSON.LD.Flattening do
# 2)
def generate_node_map(element, node_map, node_id_map, active_graph, active_subject,
active_property, list) when is_map(element) do
identifier_map = %{}
counter = 1
node_map = Map.put_new(node_map, active_graph, %{})
node = node_map[active_graph][active_subject]
# 3)
if old_types = Map.get(element, "@type") do
new_types = Enum.reduce(List.wrap(old_types), [],
fn (item, types) ->
if blank_node_id?(item) do
identifier = generate_blank_node_id(node_id_map, item)
types ++ [identifier]
else
types ++ [item]
end
end)
element = Map.put(element, "@type",
if(is_list(old_types), do: new_types, else: List.first(new_types)))
end
element =
if old_types = Map.get(element, "@type") do
new_types = Enum.reduce(List.wrap(old_types), [],
fn (item, types) ->
if blank_node_id?(item) do
identifier = generate_blank_node_id(node_id_map, item)
types ++ [identifier]
else
types ++ [item]
end
end)
Map.put(element, "@type",
if(is_list(old_types), do: new_types, else: List.first(new_types)))
else
element
end
cond do
@@ -136,7 +136,7 @@ defmodule JSON.LD.Flattening do
# 5)
Map.has_key?(element, "@list") ->
{:ok, result_list} = new_list
{:ok, result_list} = new_list()
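# The explicit parentheses mark new_list/0 as a function call rather than a
# variable reference, which avoids the compiler's ambiguity warning.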
{node_map, result} =
try do
{
@@ -173,19 +173,22 @@ defmodule JSON.LD.Flattening do
end
# 6.3)
unless Map.has_key?(node_map[active_graph], id) do
node_map = Map.update!(node_map, active_graph, fn graph ->
Map.put_new(graph, id, %{"@id" => id})
end)
end
node_map =
unless Map.has_key?(node_map[active_graph], id) do
Map.update!(node_map, active_graph, fn graph ->
Map.put_new(graph, id, %{"@id" => id})
end)
else
node_map
end
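# Maps are immutable, so the `unless`/`else` expression itself returns the
# node_map to use; binding its result keeps the newly added node visible below.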
# 6.4) TODO: Spec fixme: "this line is asked for by the spec, but it breaks various tests" (according to the Java and Go implementations, which perform this step before 6.7) instead)
node = node_map[active_graph][id]
# 6.5)
if is_map(active_subject) do
unless Map.has_key?(node, active_property) do
node_map =
node_map =
if is_map(active_subject) do
unless Map.has_key?(node, active_property) do
update_in(node_map, [active_graph, id, active_property], fn
nil -> [active_subject]
items ->
@@ -193,13 +196,14 @@ defmodule JSON.LD.Flattening do
do: items ++ [active_subject],
else: items
end)
end
# 6.6)
else
unless is_nil(active_property) do
reference = %{"@id" => id}
if is_nil(list) do
node_map =
else
node_map
end
# 6.6)
else
unless is_nil(active_property) do
reference = %{"@id" => id}
if is_nil(list) do
update_in(node_map, [active_graph, active_subject, active_property], fn
nil -> [reference]
items ->
@@ -207,75 +211,98 @@ defmodule JSON.LD.Flattening do
do: items ++ [reference],
else: items
end)
# 6.6.3) TODO: Spec fixme: spec says to add ELEMENT to @list member, should be REFERENCE
# 6.6.3) TODO: Spec fixme: spec says to add ELEMENT to @list member, should be REFERENCE
else
append_to_list(list, reference)
node_map
end
else
append_to_list(list, reference)
node_map
end
end
end
# 6.7)
if Map.has_key?(element, "@type") do
node_map =
Enum.reduce element["@type"], node_map, fn (type, node_map) ->
update_in(node_map, [active_graph, id, "@type"], fn
nil -> [type]
items ->
unless type in items,
do: items ++ [type],
else: items
end)
end
element = Map.delete(element, "@type")
end
{node_map, element} =
if Map.has_key?(element, "@type") do
node_map =
Enum.reduce element["@type"], node_map, fn (type, node_map) ->
update_in(node_map, [active_graph, id, "@type"], fn
nil -> [type]
items ->
unless type in items,
do: items ++ [type],
else: items
end)
end
element = Map.delete(element, "@type")
{node_map, element}
else
{node_map, element}
end
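# Both branches return {node_map, element}, so the updated map and the element
# stripped of "@type" stay in scope for the following steps.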
# 6.8)
if Map.has_key?(element, "@index") do
{element_index, element} = Map.pop(element, "@index")
if node_index = get_in(node_map, [active_graph, id, "@index"]) do
if not deep_compare(node_index, element_index) do
raise JSON.LD.ConflictingIndexesError,
message: "Multiple conflicting indexes have been found for the same node."
end
else
{node_map, element} =
if Map.has_key?(element, "@index") do
{element_index, element} = Map.pop(element, "@index")
node_map =
update_in node_map, [active_graph, id], fn node ->
Map.put(node, "@index", element_index)
if node_index = get_in(node_map, [active_graph, id, "@index"]) do
if not deep_compare(node_index, element_index) do
raise JSON.LD.ConflictingIndexesError,
message: "Multiple conflicting indexes have been found for the same node."
end
else
update_in node_map, [active_graph, id], fn node ->
Map.put(node, "@index", element_index)
end
end
{node_map, element}
else
{node_map, element}
end
end
# 6.9)
if Map.has_key?(element, "@reverse") do
referenced_node = %{"@id" => id}
{reverse_map, element} = Map.pop(element, "@reverse")
node_map =
Enum.reduce reverse_map, node_map, fn ({property, values}, node_map) ->
{node_map, element} =
if Map.has_key?(element, "@reverse") do
referenced_node = %{"@id" => id}
{reverse_map, element} = Map.pop(element, "@reverse")
node_map = Enum.reduce reverse_map, node_map, fn ({property, values}, node_map) ->
Enum.reduce values, node_map, fn (value, node_map) ->
generate_node_map(value, node_map, node_id_map, active_graph,
referenced_node, property)
end
end
end
{node_map, element}
else
{node_map, element}
end
# 6.10)
if Map.has_key?(element, "@graph") do
{graph, element} = Map.pop(element, "@graph")
node_map = generate_node_map(graph, node_map, node_id_map, id)
end
{node_map, element} =
if Map.has_key?(element, "@graph") do
{graph, element} = Map.pop(element, "@graph")
{generate_node_map(graph, node_map, node_id_map, id), element}
else
{node_map, element}
end
# 6.11)
element
|> Enum.sort_by(fn {property, _} -> property end)
|> Enum.reduce(node_map, fn ({property, value}, node_map) ->
if blank_node_id?(property) do
property = generate_blank_node_id(node_id_map, property)
end
unless Map.has_key?(node_map[active_graph][id], property) do
node_map = update_in node_map, [active_graph, id], fn node ->
Map.put(node, property, [])
property =
if blank_node_id?(property) do
generate_blank_node_id(node_id_map, property)
else
property
end
node_map =
unless Map.has_key?(node_map[active_graph][id], property) do
update_in node_map, [active_graph, id], fn node ->
Map.put(node, property, [])
end
else
node_map
end
end
generate_node_map(value, node_map, node_id_map, active_graph, id, property)
end)
end

View file

@@ -22,47 +22,60 @@ defmodule JSON.LD.IRIExpansion do
def expand_iri(value, active_context, doc_relative, vocab, local_context, defined) do
# 2)
if local_context && (local_def = local_context[value]) && defined[value] != true do
{active_context, defined} =
{active_context, defined} =
if local_context && local_context[value] && defined[value] != true do
local_def = local_context[value]
JSON.LD.Context.create_term_definition(
active_context, local_context, value, local_def, defined)
end
else
{active_context, defined}
end
result = cond do
# 3) If vocab is true and the active context has a term definition for value, return the associated IRI mapping.
vocab && Map.has_key?(active_context.term_defs, value) ->
(term_def = active_context.term_defs[value]) && term_def.iri_mapping
# 4) If value contains a colon (:), it is either an absolute IRI, a compact IRI, or a blank node identifier
String.contains?(value, ":") ->
case compact_iri_parts(value) do
[prefix, suffix] ->
# 4.3)
if local_context && (local_def = local_context[prefix]) && defined[prefix] != true do
{result, active_context, defined} =
cond do
# 3) If vocab is true and the active context has a term definition for value, return the associated IRI mapping.
vocab && Map.has_key?(active_context.term_defs, value) ->
result = (term_def = active_context.term_defs[value]) && term_def.iri_mapping
{result, active_context, defined}
# 4) If value contains a colon (:), it is either an absolute IRI, a compact IRI, or a blank node identifier
String.contains?(value, ":") ->
case compact_iri_parts(value) do
[prefix, suffix] ->
# 4.3)
{active_context, defined} =
JSON.LD.Context.create_term_definition(
active_context, local_context, prefix, local_def, defined)
end
# 4.4)
if prefix_def = active_context.term_defs[prefix] do
prefix_def.iri_mapping <> suffix
else
value # 4.5)
end
nil -> value # 4.2)
end
# 5) If vocab is true, and active context has a vocabulary mapping, return the result of concatenating the vocabulary mapping with value.
vocab && (vocabulary_mapping = active_context.vocab) ->
vocabulary_mapping <> value
# 6) Otherwise, if document relative is true, set value to the result of resolving value against the base IRI. Only the basic algorithm in section 5.2 of [RFC3986] is used; neither Syntax-Based Normalization nor Scheme-Based Normalization are performed. Characters additionally allowed in IRI references are treated in the same way that unreserved characters are treated in URI references, per section 6.5 of [RFC3987].
doc_relative ->
absolute_iri(value, JSON.LD.Context.base(active_context))
if local_context && local_context[prefix] && defined[prefix] != true do
local_def = local_context[prefix]
JSON.LD.Context.create_term_definition(
active_context, local_context, prefix, local_def, defined)
else
{active_context, defined}
end
# 4.4)
result =
if prefix_def = active_context.term_defs[prefix] do
prefix_def.iri_mapping <> suffix
else
value # 4.5)
end
{result, active_context, defined}
nil ->
{value, active_context, defined} # 4.2)
end
# 5) If vocab is true, and active context has a vocabulary mapping, return the result of concatenating the vocabulary mapping with value.
vocab && active_context.vocab ->
vocabulary_mapping = active_context.vocab
{vocabulary_mapping <> value, active_context, defined}
# 6) Otherwise, if document relative is true, set value to the result of resolving value against the base IRI. Only the basic algorithm in section 5.2 of [RFC3986] is used; neither Syntax-Based Normalization nor Scheme-Based Normalization are performed. Characters additionally allowed in IRI references are treated in the same way that unreserved characters are treated in URI references, per section 6.5 of [RFC3987].
doc_relative ->
{absolute_iri(value, JSON.LD.Context.base(active_context)), active_context, defined}
# TODO: RDF.rb's implementation differs from the spec here by checking whether base_iri is actually present in the previous clause and adding the following additional clause. Another spec error?
# if local_context && RDF::URI(value).relative?
# # If local context is not null and value is not an absolute IRI, an invalid IRI mapping error has been detected and processing is aborted.
# raise JSON.LD.InvalidIRIMappingError, message: "not an absolute IRI: #{value}"
# 7) Return value as is.
true -> value
end
# 7) Return value as is.
true ->
{value, active_context, defined}
end
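# Every branch of the cond above yields a {result, active_context, defined}
# tuple, keeping the possibly extended context and defined map available.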
if local_context do
{result, active_context, defined}

View file

@@ -3,8 +3,6 @@ defmodule JSON.LD.NodeIdentifierMap do
use GenServer
import JSON.LD
# Client API
def start_link(opts \\ []) do
@@ -32,8 +30,8 @@ defmodule JSON.LD.NodeIdentifierMap do
end
def handle_call({:generate_id, identifier}, _, %{map: map, counter: counter} = state) do
if identifier && (mapped_identifier = map[identifier]) do
{:reply, mapped_identifier, state}
if identifier && map[identifier] do
{:reply, map[identifier], state}
else
blank_node_id = "_:b#{counter}"
{:reply, blank_node_id, %{

View file

@@ -39,7 +39,7 @@ defmodule JSON.LD do
Returns if the given value is a JSON-LD keyword.
"""
def keyword?(value) when is_binary(value) and value in @keywords, do: true
def keyword?(value), do: false
def keyword?(_value), do: false
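# The underscore prefix marks the parameter as intentionally unused in this
# clause and keeps the compiler from warning about it.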
@doc """