Add typespecs and fix some Credo warnings (#6)

rustra 2020-06-22 01:21:33 +02:00 committed by GitHub
parent bee796b592
commit 40b44edb0a
14 changed files with 706 additions and 686 deletions
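Two patterns repeat across the hunks below: a @spec annotation is added in front of each public and private function, and constructs that Credo flags, such as piping into case(do: ...) or raise used as an expression argument, are rewritten as plain do-blocks. A minimal sketch of both patterns, assuming a made-up module and function name that do not appear in the changed files:

defmodule Example do
  # a @spec is added in front of the function...
  @spec first_or_all([term]) :: term | [term]
  def first_or_all(values) do
    values
    |> Enum.uniq()
    # ...and piping into case(do: ...) becomes a regular case block
    |> case do
      [single] -> single
      list -> list
    end
  end
end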


@ -2,32 +2,35 @@ defmodule JSON.LD.Compaction do
@moduledoc nil
import JSON.LD.Utils
alias JSON.LD.Context
def compact(input, context, options \\ %JSON.LD.Options{}) do
with options = JSON.LD.Options.new(options),
active_context = JSON.LD.context(context, options),
inverse_context = Context.inverse(active_context),
expanded = JSON.LD.expand(input, options) do
result =
case do_compact(expanded, active_context, inverse_context, nil, options.compact_arrays) do
[] ->
%{}
result when is_list(result) ->
# TODO: Spec fixme? We're setting vocab to true, as other implementations do it, but this is not mentioned in the spec
%{compact_iri("@graph", active_context, inverse_context, nil, true) => result}
result ->
result
end
if Context.empty?(active_context),
do: result,
else: Map.put(result, "@context", context["@context"] || context)
end
alias JSON.LD.{Context, Options}
@spec compact(map | [map], map | nil, Options.t() | Enum.t()) :: map
def compact(input, context, options \\ %Options{}) do
options = Options.new(options)
active_context = JSON.LD.context(context, options)
inverse_context = Context.inverse(active_context)
expanded = JSON.LD.expand(input, options)
result =
case do_compact(expanded, active_context, inverse_context, nil, options.compact_arrays) do
[] ->
%{}
result when is_list(result) ->
# TODO: Spec fixme? We're setting vocab to true, as other implementations do it, but this is not mentioned in the spec
%{compact_iri("@graph", active_context, inverse_context, nil, true) => result}
result ->
result
end
if Context.empty?(active_context),
do: result,
else: Map.put(result, "@context", context["@context"] || context)
end
@spec do_compact(any, Context.t(), map, String.t() | nil, boolean) :: any
defp do_compact(
element,
active_context,
@ -92,6 +95,7 @@ defmodule JSON.LD.Compaction do
end
end
@spec do_compact_non_scalar(any, Context.t(), map, String.t() | nil, boolean) :: any
defp do_compact_non_scalar(
element,
active_context,
@ -133,13 +137,10 @@ defmodule JSON.LD.Compaction do
[compact_iri(expanded_type, active_context, inverse_context, nil, true)]
end)
# 7.1.2.3)
|> case(
do:
(
[compacted_value] -> compacted_value
compacted_value -> compacted_value
)
)
|> case do
[compacted_value] -> compacted_value
compacted_value -> compacted_value
end
end
# 7.1.3)
@ -162,8 +163,7 @@ defmodule JSON.LD.Compaction do
if term_def && term_def.reverse_property do
# 7.2.2.1.1)
value =
if (!compact_arrays or term_def.container_mapping == "@set") and
!is_list(value) do
if (!compact_arrays or term_def.container_mapping == "@set") and !is_list(value) do
[value]
else
value
@ -324,6 +324,7 @@ defmodule JSON.LD.Compaction do
end)
end
@spec merge_compacted_value(map, String.t(), any) :: map
defp merge_compacted_value(map, key, value) do
Map.update(map, key, value, fn
old_value when is_list(old_value) and is_list(value) ->
@ -345,6 +346,7 @@ defmodule JSON.LD.Compaction do
Details at <https://www.w3.org/TR/json-ld-api/#iri-compaction>
"""
@spec compact_iri(any, Context.t(), map, any | nil, boolean, boolean) :: any | nil
def compact_iri(
iri,
active_context,
@ -398,68 +400,70 @@ defmodule JSON.LD.Compaction do
else
# 2.6.4) For each item in list:
{common_type, common_language} =
Enum.reduce_while(
list,
{common_type, common_language},
fn item, {common_type, common_language} ->
# 2.6.4.1) Initialize item language to @none and item type to @none.
{item_type, item_language} = {"@none", "@none"}
# 2.6.4.2) If item contains the key @value:
{item_type, item_language} =
if Map.has_key?(item, "@value") do
cond do
# 2.6.4.2.1) If item contains the key @language, then set item language to its associated value.
Map.has_key?(item, "@language") ->
{item_type, item["@language"]}
# 2.6.4.2.2) Otherwise, if item contains the key @type, set item type to its associated value.
Map.has_key?(item, "@type") ->
{item["@type"], item_language}
# 2.6.4.2.3) Otherwise, set item language to @null.
true ->
{item_type, "@null"}
end
# 2.6.4.3) Otherwise, set item type to @id.
else
{"@id", item_language}
end
common_language =
cond do
# 2.6.4.4) If common language is null, set it to item language.
is_nil(common_language) ->
item_language
# 2.6.4.5) Otherwise, if item language does not equal common language and item contains the key @value, then set common language to @none because list items have conflicting languages.
item_language != common_language and Map.has_key?(item, "@value") ->
"@none"
true ->
common_language
end
common_type =
cond do
# 2.6.4.6) If common type is null, set it to item type.
is_nil(common_type) ->
item_type
# 2.6.4.7) Otherwise, if item type does not equal common type, then set common type to @none because list items have conflicting types.
item_type != common_type ->
"@none"
true ->
common_type
end
# 2.6.4.8) If common language is @none and common type is @none, then stop processing items in the list because it has been detected that there is no common language or type amongst the items.
if common_language == "@none" and common_type == "@none" do
{:halt, {common_type, common_language}}
else
{:cont, {common_type, common_language}}
end
end
)
# 2.6.5) If common language is null, set it to @none.
common_language = if is_nil(common_language), do: "@none", else: common_language
@ -535,9 +539,9 @@ defmodule JSON.LD.Compaction do
# 2.12.1) If the result of using the IRI compaction algorithm, passing active context, inverse context, the value associated with the @id key in value for iri, true for vocab, and true for document relative has a term definition in the active context with an IRI mapping that equals the value associated with the @id key in value, then append @vocab, @id, and @none, in that order, to preferred values.
# TODO: Spec fixme? document_relative is not a specified parameter of compact_iri
compact_id = compact_iri(value["@id"], active_context, inverse_context, nil, true)
term_def = active_context.term_defs[compact_id]
if (term_def = active_context.term_defs[compact_id]) &&
term_def.iri_mapping == value["@id"] do
if term_def && term_def.iri_mapping == value["@id"] do
preferred_values ++ ~w[@vocab @id @none]
# 2.12.2) Otherwise, append @id, @vocab, and @none, in that order, to preferred values.
@ -624,11 +628,13 @@ defmodule JSON.LD.Compaction do
end
end
@spec shortest_or_least?(String.t(), String.t()) :: boolean
defp shortest_or_least?(a, b) do
(a_len = String.length(a)) < (b_len = String.length(b)) or
(a_len == b_len and a < b)
end
@spec remove_base(String.t(), String.t() | nil) :: String.t()
defp remove_base(iri, nil), do: iri
defp remove_base(iri, base) do
@ -638,15 +644,13 @@ defmodule JSON.LD.Compaction do
String.split_at(iri, base_len) |> elem(1)
else
case URI.parse(base) do
%URI{path: nil} ->
iri
base ->
do_remove_base(iri, %URI{base | path: parent_path(base.path)}, 0)
%URI{path: nil} -> iri
base -> do_remove_base(iri, %URI{base | path: parent_path(base.path)}, 0)
end
end
end
@spec do_remove_base(String.t(), URI.t(), non_neg_integer) :: String.t()
defp do_remove_base(iri, base, index) do
base_str = URI.to_string(base)
@ -666,6 +670,7 @@ defmodule JSON.LD.Compaction do
end
end
@spec parent_path(String.t()) :: String.t()
defp parent_path("/"), do: "/"
defp parent_path(path) do
@ -680,6 +685,7 @@ defmodule JSON.LD.Compaction do
Details at <https://www.w3.org/TR/json-ld-api/#value-compaction>
"""
@spec compact_value(any, Context.t(), map, String.t()) :: any
def compact_value(value, active_context, inverse_context, active_property) do
term_def = active_context.term_defs[active_property]
# 1) Initialize number members to the number of members value contains.
@ -750,6 +756,7 @@ defmodule JSON.LD.Compaction do
Details at <https://www.w3.org/TR/json-ld-api/#term-selection>
"""
@spec select_term(map, String.t(), [String.t()], String.t(), [String.t()]) :: String.t()
def select_term(inverse_context, iri, containers, type_language, preferred_values) do
container_map = inverse_context[iri]


@ -1,48 +1,64 @@
defmodule JSON.LD.Context do
import JSON.LD.{IRIExpansion, Utils}
alias JSON.LD.Context.TermDefinition
alias JSON.LD.Options
alias RDF.IRI
@type local :: map | String.t() | nil
@type remote :: [map]
@type value :: map | String.t() | nil
@type t :: %__MODULE__{
term_defs: map,
default_language: String.t() | nil,
vocab: nil,
base_iri: String.t() | boolean | nil,
api_base_iri: String.t() | nil
}
defstruct term_defs: %{},
default_language: nil,
vocab: nil,
base_iri: false,
api_base_iri: nil
import JSON.LD.IRIExpansion
import JSON.LD.Utils
alias JSON.LD.Context.TermDefinition
alias RDF.IRI
def base(%JSON.LD.Context{base_iri: false, api_base_iri: api_base_iri}),
@spec base(t) :: String.t() | nil
def base(%__MODULE__{base_iri: false, api_base_iri: api_base_iri}),
do: api_base_iri
def base(%JSON.LD.Context{base_iri: base_iri}),
def base(%__MODULE__{base_iri: base_iri}),
do: base_iri
def new(options \\ %JSON.LD.Options{}),
do: %JSON.LD.Context{api_base_iri: JSON.LD.Options.new(options).base}
@spec new(Options.t()) :: t
def new(options \\ %Options{}),
do: %__MODULE__{api_base_iri: Options.new(options).base}
@spec create(map, Options.t()) :: t
def create(%{"@context" => json_ld_context}, options),
do: new(options) |> update(json_ld_context, [], options)
do: options |> new() |> update(json_ld_context, [], options)
def update(active, local, remote \\ [], options \\ %JSON.LD.Options{})
@spec update(t, [local] | local, remote, Options.t()) :: t
def update(active, local, remote \\ [], options \\ %Options{})
def update(%JSON.LD.Context{} = active, local, remote, options) when is_list(local) do
def update(%__MODULE__{} = active, local, remote, options) when is_list(local) do
Enum.reduce(local, active, fn local, result ->
do_update(result, local, remote, options)
end)
end
# 2) If local context is not an array, set it to an array containing only local context.
def update(%JSON.LD.Context{} = active, local, remote, options) do
update(active, [local], remote, options)
end
def update(%__MODULE__{} = active, local, remote, options),
do: update(active, [local], remote, options)
# 3.1) If context is null, set result to a newly-initialized active context and continue with the next context. The base IRI of the active context is set to the IRI of the currently being processed document (which might be different from the currently being processed context), if available; otherwise to null. If set, the base option of a JSON-LD API Implementation overrides the base IRI.
defp do_update(%JSON.LD.Context{}, nil, _remote, options) do
new(options)
end
@spec do_update(t, local, remote, Options.t()) :: t
defp do_update(%__MODULE__{}, nil, _remote, options),
do: new(options)
# 3.2) If context is a string, [it's interpreted as a remote context]
defp do_update(%JSON.LD.Context{} = active, local, remote, options) when is_binary(local) do
defp do_update(%__MODULE__{} = active, local, remote, options) when is_binary(local) do
# 3.2.1)
local = absolute_iri(local, base(active))
@ -97,7 +113,7 @@ defmodule JSON.LD.Context do
end
# 3.4) - 3.8)
defp do_update(%JSON.LD.Context{} = active, local, remote, _) when is_map(local) do
defp do_update(%__MODULE__{} = active, local, remote, _) when is_map(local) do
with {base, local} <- Map.pop(local, "@base", false),
{vocab, local} <- Map.pop(local, "@vocab", false),
{language, local} <- Map.pop(local, "@language", false) do
@ -110,12 +126,12 @@ defmodule JSON.LD.Context do
end
# 3.3) If context is not a JSON object, an invalid local context error has been detected and processing is aborted.
defp do_update(_, local, _, _),
do:
raise(JSON.LD.InvalidLocalContextError,
message: "#{inspect(local)} is not a valid @context value"
)
defp do_update(_, local, _, _) do
raise JSON.LD.InvalidLocalContextError,
message: "#{inspect(local)} is not a valid @context value"
end
@spec set_base(t, boolean, remote) :: t
defp set_base(active, false, _),
do: active
@ -126,10 +142,10 @@ defmodule JSON.LD.Context do
cond do
# TODO: this slightly differs from the spec, due to our false special value for base_iri; add more tests
is_nil(base) or IRI.absolute?(base) ->
%JSON.LD.Context{active | base_iri: base}
%__MODULE__{active | base_iri: base}
active.base_iri ->
%JSON.LD.Context{active | base_iri: absolute_iri(base, active.base_iri)}
%__MODULE__{active | base_iri: absolute_iri(base, active.base_iri)}
true ->
raise JSON.LD.InvalidBaseIRIError,
@ -137,31 +153,33 @@ defmodule JSON.LD.Context do
end
end
@spec set_vocab(t, boolean | nil) :: t
defp set_vocab(active, false), do: active
defp set_vocab(active, vocab) do
if is_nil(vocab) or IRI.absolute?(vocab) or blank_node_id?(vocab) do
%JSON.LD.Context{active | vocab: vocab}
%__MODULE__{active | vocab: vocab}
else
raise JSON.LD.InvalidVocabMappingError,
message: "#{inspect(vocab)} is not a valid vocabulary mapping"
end
end
@spec set_language(t, boolean | nil) :: t
defp set_language(active, false), do: active
defp set_language(active, nil),
do: %JSON.LD.Context{active | default_language: nil}
do: %__MODULE__{active | default_language: nil}
defp set_language(active, language) when is_binary(language),
do: %JSON.LD.Context{active | default_language: String.downcase(language)}
do: %__MODULE__{active | default_language: String.downcase(language)}
defp set_language(_, language),
do:
raise(JSON.LD.InvalidDefaultLanguageError,
message: "#{inspect(language)} is not a valid language"
)
defp set_language(_, language) do
raise JSON.LD.InvalidDefaultLanguageError,
message: "#{inspect(language)} is not a valid language"
end
@spec language(t, String.t()) :: String.t() | nil
def language(active, term) do
case Map.get(active.term_defs, term, %TermDefinition{}).language_mapping do
false -> active.default_language
@ -169,6 +187,7 @@ defmodule JSON.LD.Context do
end
end
@spec create_term_definitions(t, map, map) :: t
defp create_term_definitions(active, local, defined \\ %{}) do
{active, _} =
Enum.reduce(local, {active, defined}, fn {term, value}, {active, defined} ->
@ -183,6 +202,7 @@ defmodule JSON.LD.Context do
see <https://www.w3.org/TR/json-ld-api/#create-term-definition>
"""
@spec create_term_definition(t, map, String.t(), value, map) :: {t, map}
def create_term_definition(active, local, term, value, defined)
def create_term_definition(active, _, "@base", _, defined), do: {active, defined}
@ -191,11 +211,10 @@ defmodule JSON.LD.Context do
def create_term_definition(active, local, term, value, defined) do
# 3)
if term in JSON.LD.keywords(),
do:
raise(JSON.LD.KeywordRedefinitionError,
message: "#{inspect(term)} is a keyword and can not be defined in context"
)
if term in JSON.LD.keywords() do
raise JSON.LD.KeywordRedefinitionError,
message: "#{inspect(term)} is a keyword and can not be defined in context"
end
# 1)
case defined[term] do
@ -207,23 +226,18 @@ defmodule JSON.LD.Context do
raise JSON.LD.CyclicIRIMappingError
nil ->
do_create_term_definition(
active,
local,
term,
value,
# 2)
Map.put(defined, term, false)
)
# 2)
do_create_term_definition(active, local, term, value, Map.put(defined, term, false))
end
end
@spec do_create_term_definition(t, map, String.t(), value, map) :: {t, map}
defp do_create_term_definition(active, _local, term, nil, defined) do
{
# (if Map.has_key?(active.term_defs, term),
# do: put_in(active, [:term_defs, term], nil),
# else: raise "NotImplemented"),
%JSON.LD.Context{active | term_defs: Map.put(active.term_defs, term, nil)},
# (if Map.has_key?(active.term_defs, term),
# do: put_in(active, [:term_defs, term], nil),
# else: raise "NotImplemented"),
%__MODULE__{active | term_defs: Map.put(active.term_defs, term, nil)},
Map.put(defined, term, true)
}
end
@ -251,29 +265,31 @@ defmodule JSON.LD.Context do
definition = do_create_container_definition(definition, value)
definition = do_create_language_definition(definition, value)
{definition, active, defined}
else
{definition, active, defined}
end
# 18 / 11.6) Set the term definition of term in active context to definition and set the value associated with defined's key term to true.
{%JSON.LD.Context{active | term_defs: Map.put(active.term_defs, term, definition)},
Map.put(defined, term, true)}
{
%__MODULE__{active | term_defs: Map.put(active.term_defs, term, definition)},
Map.put(defined, term, true)
}
end
defp do_create_term_definition(_, _, _, value, _),
do:
raise(JSON.LD.InvalidTermDefinitionError,
message: "#{inspect(value)} is not a valid term definition"
)
defp do_create_term_definition(_, _, _, value, _) do
raise JSON.LD.InvalidTermDefinitionError,
message: "#{inspect(value)} is not a valid term definition"
end
# 10.1)
# TODO: RDF.rb implementation says: "SPEC FIXME: @type may be nil"
defp do_create_type_definition(_, _, _, %{"@type" => type}, _) when not is_binary(type),
do:
raise(JSON.LD.InvalidTypeMappingError,
message: "#{inspect(type)} is not a valid type mapping"
)
@spec do_create_type_definition(TermDefinition.t(), map, map, value, map) ::
{TermDefinition.t(), t, map}
defp do_create_type_definition(_, _, _, %{"@type" => type}, _) when not is_binary(type) do
raise JSON.LD.InvalidTypeMappingError, message: "#{inspect(type)} is not a valid type mapping"
end
# 10.2) and 10.3)
defp do_create_type_definition(definition, active, local, %{"@type" => type}, defined) do
@ -290,6 +306,8 @@ defmodule JSON.LD.Context do
defp do_create_type_definition(definition, active, _, _, defined),
do: {definition, active, defined}
@spec do_create_reverse_definition(TermDefinition.t(), t, map, value, map) ::
{boolean, TermDefinition.t(), t, map}
# 11) If value contains the key @reverse
defp do_create_reverse_definition(
definition,
@ -346,6 +364,8 @@ defmodule JSON.LD.Context do
do: {false, definition, active, defined}
# 13)
@spec do_create_id_definition(TermDefinition.t(), t, map, String.t(), map, map) ::
{TermDefinition.t(), t, map}
defp do_create_id_definition(definition, active, local, term, %{"@id" => id}, defined)
when id != term do
# 13.1)
@ -355,12 +375,9 @@ defmodule JSON.LD.Context do
cond do
expanded_id == "@context" ->
raise JSON.LD.InvalidKeywordAliasError,
message: "cannot alias @context"
raise JSON.LD.InvalidKeywordAliasError, message: "cannot alias @context"
JSON.LD.keyword?(expanded_id) or
IRI.absolute?(expanded_id) or
blank_node_id?(expanded_id) ->
JSON.LD.keyword?(expanded_id) or IRI.absolute?(expanded_id) or blank_node_id?(expanded_id) ->
{%TermDefinition{definition | iri_mapping: expanded_id}, active, defined}
true ->
@ -376,8 +393,7 @@ defmodule JSON.LD.Context do
defp do_create_id_definition(definition, active, local, term, _, defined) do
# 14)
# TODO: The W3C spec seems to contain an error by requiring only to check for a colon.
# What happens when an absolute IRI is given and an "http" term is defined in the context?
# TODO: The W3C spec seems to contain an error by requiring only to check for a colon. What happens when an absolute IRI is given and an "http" term is defined in the context?
if String.contains?(term, ":") do
case compact_iri_parts(term) do
[prefix, suffix] ->
@ -414,13 +430,13 @@ defmodule JSON.LD.Context do
end
# 16.1)
@spec do_create_container_definition(TermDefinition.t(), map) :: TermDefinition.t()
defp do_create_container_definition(_, %{"@container" => container})
when container not in ~w[@list @set @index @language],
do:
raise(JSON.LD.InvalidContainerMappingError,
message:
"#{inspect(container)} is not a valid container mapping; @container must be either @list, @set, @index, or @language"
)
when container not in ~w[@list @set @index @language] do
raise JSON.LD.InvalidContainerMappingError,
message:
"#{inspect(container)} is not a valid container mapping; @container must be either @list, @set, @index, or @language"
end
# 16.2)
defp do_create_container_definition(definition, %{"@container" => container}),
@ -430,6 +446,7 @@ defmodule JSON.LD.Context do
do: definition
# 17)
@spec do_create_language_definition(TermDefinition.t(), map) :: TermDefinition.t()
defp do_create_language_definition(definition, %{"@language" => language} = value) do
unless Map.has_key?(value, "@type") do
case language do
@ -454,7 +471,8 @@ defmodule JSON.LD.Context do
Details at <https://www.w3.org/TR/json-ld-api/#inverse-context-creation>
"""
def inverse(%JSON.LD.Context{} = context) do
@spec inverse(t) :: map
def inverse(%__MODULE__{} = context) do
# 2) Initialize default language to @none. If the active context has a default language, set default language to it.
default_language = context.default_language || "@none"
@ -466,6 +484,7 @@ defmodule JSON.LD.Context do
if term_def do
# 3.2) Initialize container to @none. If there is a container mapping in term definition, set container to its associated value.
container = term_def.container_mapping || "@none"
# 3.3) Initialize iri to the value of the IRI mapping for the term definition.
iri = term_def.iri_mapping
@ -479,13 +498,11 @@ defmodule JSON.LD.Context do
{Map.put_new(type_map, "@reverse", term), language_map}
# 3.9) Otherwise, if term definition has a type mapping
%TermDefinition{type_mapping: type_mapping}
when type_mapping != false ->
%TermDefinition{type_mapping: type_mapping} when type_mapping != false ->
{Map.put_new(type_map, type_mapping, term), language_map}
# 3.10) Otherwise, if term definition has a language mapping (might be null)
%TermDefinition{language_mapping: language_mapping}
when language_mapping != false ->
%TermDefinition{language_mapping: language_mapping} when language_mapping != false ->
language = language_mapping || "@null"
{type_map, Map.put_new(language_map, language, term)}
@ -500,10 +517,7 @@ defmodule JSON.LD.Context do
result
|> Map.put_new(iri, %{})
|> Map.update(iri, %{}, fn container_map ->
Map.put(container_map, container, %{
"@type" => type_map,
"@language" => language_map
})
Map.put(container_map, container, %{"@type" => type_map, "@language" => language_map})
end)
else
result
@ -511,12 +525,8 @@ defmodule JSON.LD.Context do
end)
end
def empty?(%JSON.LD.Context{
term_defs: term_defs,
vocab: nil,
base_iri: false,
default_language: nil
})
@spec empty?(t) :: boolean
def empty?(%__MODULE__{term_defs: term_defs, vocab: nil, base_iri: false, default_language: nil})
when map_size(term_defs) == 0,
do: true


@ -1,4 +1,12 @@
defmodule JSON.LD.Context.TermDefinition do
@type t :: %__MODULE__{
iri_mapping: String.t() | nil,
reverse_property: boolean,
type_mapping: boolean,
language_mapping: boolean,
container_mapping: nil
}
defstruct iri_mapping: nil,
reverse_property: false,
type_mapping: false,


@ -5,113 +5,115 @@ defmodule JSON.LD.Decoder do
use RDF.Serialization.Decoder
import JSON.LD.{NodeIdentifierMap, Utils}
alias JSON.LD.NodeIdentifierMap
alias RDF.{Dataset, Graph, NS}
alias JSON.LD.{NodeIdentifierMap, Options}
alias RDF.{BlankNode, Dataset, Graph, IRI, Literal, NS, Statement, XSD}
@impl RDF.Serialization.Decoder
@spec decode(String.t(), keyword) :: {:ok, Dataset.t() | Graph.t()} | {:error, any}
def decode(content, opts \\ []) do
with {:ok, json_ld_object} <- parse_json(content),
dataset = to_rdf(json_ld_object, opts) do
with {:ok, json_ld_object} <- parse_json(content) do
dataset = to_rdf(json_ld_object, opts)
{:ok, dataset}
end
end
def to_rdf(element, options \\ %JSON.LD.Options{}) do
with options = JSON.LD.Options.new(options) do
{:ok, node_id_map} = NodeIdentifierMap.start_link()
@dialyzer {:nowarn_function, to_rdf: 2}
@spec to_rdf(map, Options.t() | Enum.t()) :: Dataset.t() | Graph.t()
def to_rdf(element, options \\ %Options{}) do
{:ok, node_id_map} = NodeIdentifierMap.start_link()
try do
element
|> JSON.LD.expand(options)
|> JSON.LD.node_map(node_id_map)
|> Enum.sort_by(fn {graph_name, _} -> graph_name end)
|> Enum.reduce(Dataset.new(), fn {graph_name, graph}, dataset ->
unless relative_iri?(graph_name) do
rdf_graph =
graph
|> Enum.sort_by(fn {subject, _} -> subject end)
|> Enum.reduce(Graph.new(), fn {subject, node}, rdf_graph ->
unless relative_iri?(subject) do
node
|> Enum.sort_by(fn {property, _} -> property end)
|> Enum.reduce(rdf_graph, fn {property, values}, rdf_graph ->
cond do
property == "@type" ->
Graph.add(
rdf_graph,
node_to_rdf(subject),
RDF.NS.RDF.type(),
Enum.map(values, &node_to_rdf/1)
)
options = Options.new(options)
JSON.LD.keyword?(property) ->
rdf_graph
try do
element
|> JSON.LD.expand(options)
|> JSON.LD.node_map(node_id_map)
|> Enum.sort_by(fn {graph_name, _} -> graph_name end)
|> Enum.reduce(Dataset.new(), fn {graph_name, graph}, dataset ->
unless relative_iri?(graph_name) do
rdf_graph =
graph
|> Enum.sort_by(fn {subject, _} -> subject end)
|> Enum.reduce(Graph.new(), fn {subject, node}, rdf_graph ->
unless relative_iri?(subject) do
node
|> Enum.sort_by(fn {property, _} -> property end)
|> Enum.reduce(rdf_graph, fn {property, values}, rdf_graph ->
cond do
property == "@type" ->
Graph.add(
rdf_graph,
node_to_rdf(subject),
NS.RDF.type(),
Enum.map(values, &node_to_rdf/1)
)
not options.produce_generalized_rdf and
blank_node_id?(property) ->
rdf_graph
JSON.LD.keyword?(property) ->
rdf_graph
relative_iri?(property) ->
rdf_graph
not options.produce_generalized_rdf and blank_node_id?(property) ->
rdf_graph
true ->
Enum.reduce(values, rdf_graph, fn
%{"@list" => list}, rdf_graph ->
with {list_triples, first} <-
list_to_rdf(list, node_id_map) do
relative_iri?(property) ->
rdf_graph
true ->
Enum.reduce(values, rdf_graph, fn
%{"@list" => list}, rdf_graph ->
with {list_triples, first} <- list_to_rdf(list, node_id_map) do
rdf_graph
|> Graph.add({node_to_rdf(subject), node_to_rdf(property), first})
|> Graph.add(list_triples)
end
item, rdf_graph ->
case object_to_rdf(item) do
nil ->
rdf_graph
|> Graph.add({node_to_rdf(subject), node_to_rdf(property), first})
|> Graph.add(list_triples)
end
item, rdf_graph ->
case object_to_rdf(item) do
nil ->
rdf_graph
object ->
Graph.add(
rdf_graph,
{node_to_rdf(subject), node_to_rdf(property), object}
)
end
end)
end
end)
else
rdf_graph
end
end)
object ->
Graph.add(
rdf_graph,
{node_to_rdf(subject), node_to_rdf(property), object}
)
end
end)
end
end)
else
rdf_graph
end
end)
if Enum.empty?(rdf_graph) do
dataset
else
Dataset.add(
dataset,
rdf_graph,
if(graph_name == "@default", do: nil, else: graph_name)
)
end
else
if Enum.empty?(rdf_graph) do
dataset
else
graph_name = if graph_name == "@default", do: nil, else: graph_name
Dataset.add(dataset, rdf_graph, graph_name)
end
end)
after
NodeIdentifierMap.stop(node_id_map)
end
else
dataset
end
end)
after
NodeIdentifierMap.stop(node_id_map)
end
end
@spec parse_json(String.t(), [Jason.decode_opt()]) ::
{:ok, map} | {:error, Jason.DecodeError.t()}
def parse_json(content, _opts \\ []) do
Jason.decode(content)
end
@spec parse_json!(String.t(), [Jason.decode_opt()]) :: map
def parse_json!(content, _opts \\ []) do
Jason.decode!(content)
end
def node_to_rdf(nil), do: nil
@spec node_to_rdf(String.t()) :: IRI.t() | BlankNode.t()
def node_to_rdf(node) do
if blank_node_id?(node) do
node
@ -122,10 +124,9 @@ defmodule JSON.LD.Decoder do
end
end
@spec object_to_rdf(map) :: IRI.t() | BlankNode.t() | Literal.t() | nil
defp object_to_rdf(%{"@id" => id}) do
unless relative_iri?(id) do
node_to_rdf(id)
end
unless relative_iri?(id), do: node_to_rdf(id)
end
defp object_to_rdf(%{"@value" => value} = item) do
@ -136,9 +137,9 @@ defmodule JSON.LD.Decoder do
is_boolean(value) ->
value =
value
|> RDF.XSD.Boolean.new()
|> RDF.XSD.Boolean.canonical()
|> RDF.XSD.Boolean.lexical()
|> XSD.Boolean.new()
|> XSD.Boolean.canonical()
|> XSD.Boolean.lexical()
datatype = if is_nil(datatype), do: NS.XSD.boolean(), else: datatype
{value, datatype}
@ -146,9 +147,9 @@ defmodule JSON.LD.Decoder do
is_float(value) or (is_number(value) and datatype == to_string(NS.XSD.double())) ->
value =
value
|> RDF.XSD.Double.new()
|> RDF.XSD.Double.canonical()
|> RDF.XSD.Double.lexical()
|> XSD.Double.new()
|> XSD.Double.canonical()
|> XSD.Double.lexical()
datatype = if is_nil(datatype), do: NS.XSD.double(), else: datatype
{value, datatype}
@ -156,20 +157,16 @@ defmodule JSON.LD.Decoder do
is_integer(value) or (is_number(value) and datatype == to_string(NS.XSD.integer())) ->
value =
value
|> RDF.XSD.Integer.new()
|> RDF.XSD.Integer.canonical()
|> RDF.XSD.Integer.lexical()
|> XSD.Integer.new()
|> XSD.Integer.canonical()
|> XSD.Integer.lexical()
datatype = if is_nil(datatype), do: NS.XSD.integer(), else: datatype
{value, datatype}
is_nil(datatype) ->
datatype =
if Map.has_key?(item, "@language") do
RDF.langString()
else
NS.XSD.string()
end
if Map.has_key?(item, "@language"), do: RDF.langString(), else: NS.XSD.string()
{value, datatype}
@ -178,44 +175,44 @@ defmodule JSON.LD.Decoder do
end
if language = item["@language"] do
RDF.Literal.new(value, language: language, canonicalize: true)
Literal.new(value, language: language, canonicalize: true)
else
RDF.Literal.new(value, datatype: datatype, canonicalize: true)
Literal.new(value, datatype: datatype, canonicalize: true)
end
end
@spec list_to_rdf([map], pid) :: {[Statement.t()], IRI.t() | BlankNode.t()}
defp list_to_rdf(list, node_id_map) do
{list_triples, first, last} =
list
|> Enum.reduce({[], nil, nil}, fn item, {list_triples, first, last} ->
Enum.reduce(list, {[], nil, nil}, fn item, {list_triples, first, last} ->
case object_to_rdf(item) do
nil ->
{list_triples, first, last}
object ->
with bnode = node_to_rdf(generate_blank_node_id(node_id_map)) do
if last do
{
list_triples ++
[{last, RDF.NS.RDF.rest(), bnode}, {bnode, RDF.NS.RDF.first(), object}],
first,
bnode
}
else
{
list_triples ++ [{bnode, RDF.NS.RDF.first(), object}],
bnode,
bnode
}
end
bnode = node_to_rdf(generate_blank_node_id(node_id_map))
if last do
{
list_triples ++
[{last, NS.RDF.rest(), bnode}, {bnode, NS.RDF.first(), object}],
first,
bnode
}
else
{
list_triples ++ [{bnode, NS.RDF.first(), object}],
bnode,
bnode
}
end
end
end)
if last do
{list_triples ++ [{last, RDF.NS.RDF.rest(), RDF.NS.RDF.nil()}], first}
{list_triples ++ [{last, NS.RDF.rest(), NS.RDF.nil()}], first}
else
{[], RDF.NS.RDF.nil()}
{[], NS.RDF.nil()}
end
end


@ -6,6 +6,7 @@ defmodule JSON.LD.DocumentLoader do
"""
alias JSON.LD.DocumentLoader.RemoteDocument
alias JSON.LD.Options
@callback load(String.t(), JSON.LD.Options.t()) :: {:ok, RemoteDocument.t()} | {:error, any}
@callback load(String.t(), Options.t()) :: {:ok, RemoteDocument.t()} | {:error, any}
end


@ -2,7 +2,9 @@ defmodule JSON.LD.DocumentLoader.Default do
@behaviour JSON.LD.DocumentLoader
alias JSON.LD.DocumentLoader.RemoteDocument
alias JSON.LD.Options
@spec load(String.t(), Options.t()) :: {:ok, RemoteDocument.t()} | {:error, any}
def load(url, _options) do
with {:ok, res} <- http_get(url),
{:ok, data} <- Jason.decode(res.body) do
@ -10,6 +12,8 @@ defmodule JSON.LD.DocumentLoader.Default do
end
end
@spec http_get(String.t()) ::
{:ok, HTTPoison.Response.t() | HTTPoison.AsyncResponse.t()} | {:error, any}
defp http_get(url) do
HTTPoison.get(url, [accept: "application/ld+json"], follow_redirect: true)
rescue


@ -4,7 +4,22 @@ defmodule JSON.LD.Encoder do
use RDF.Serialization.Encoder
alias RDF.{IRI, BlankNode, Literal, XSD, NS}
alias JSON.LD.Options
alias RDF.{
BlankNode,
Dataset,
Description,
Graph,
IRI,
LangString,
Literal,
NS,
Statement,
XSD
}
@type input :: Dataset.t() | Description.t() | Graph.t()
@rdf_type to_string(RDF.NS.RDF.type())
@rdf_nil to_string(RDF.NS.RDF.nil())
@ -13,108 +28,111 @@ defmodule JSON.LD.Encoder do
@rdf_list to_string(RDF.uri(RDF.NS.RDF.List))
@impl RDF.Serialization.Encoder
@spec encode(input, Options.t() | Enum.t()) :: {:ok, String.t()} | {:error, any}
def encode(data, opts \\ []) do
with {:ok, json_ld_object} <- from_rdf(data, opts) do
encode_json(json_ld_object, opts)
end
end
@spec encode!(input, Options.t() | Enum.t()) :: String.t()
def encode!(data, opts \\ []) do
data
|> from_rdf!(opts)
|> encode_json!(opts)
end
def from_rdf(dataset, options \\ %JSON.LD.Options{}) do
try do
{:ok, from_rdf!(dataset, options)}
rescue
exception -> {:error, Exception.message(exception)}
end
@spec from_rdf(input, Options.t() | Enum.t()) :: {:ok, [map]} | {:error, any}
def from_rdf(dataset, options \\ %Options{}) do
{:ok, from_rdf!(dataset, options)}
rescue
exception ->
{:error, Exception.message(exception)}
end
def from_rdf!(rdf_data, options \\ %JSON.LD.Options{})
@spec from_rdf!(input, Options.t() | Enum.t()) :: [map]
def from_rdf!(rdf_data, options \\ %Options{})
def from_rdf!(%RDF.Dataset{} = dataset, options) do
with options = JSON.LD.Options.new(options) do
graph_map =
Enum.reduce(RDF.Dataset.graphs(dataset), %{}, fn graph, graph_map ->
# 3.1)
name = to_string(graph.name || "@default")
def from_rdf!(%Dataset{} = dataset, options) do
options = Options.new(options)
# 3.3)
graph_map =
if graph.name && !get_in(graph_map, ["@default", name]) do
Map.update(graph_map, "@default", %{name => %{"@id" => name}}, fn default_graph ->
Map.put(default_graph, name, %{"@id" => name})
end)
else
graph_map
end
graph_map =
Enum.reduce(Dataset.graphs(dataset), %{}, fn graph, graph_map ->
# 3.1)
name = to_string(graph.name || "@default")
# 3.2 + 3.4)
Map.put(
graph_map,
name,
node_map_from_graph(
graph,
Map.get(graph_map, name, %{}),
options.use_native_types,
options.use_rdf_type
)
)
end)
# 4)
graph_map =
Enum.reduce(graph_map, %{}, fn {name, graph_object}, graph_map ->
Map.put(graph_map, name, convert_list(graph_object))
end)
# 5+6)
Map.get(graph_map, "@default", %{})
|> Enum.sort_by(fn {subject, _} -> subject end)
|> Enum.reduce([], fn {subject, node}, result ->
# 6.1)
node =
if Map.has_key?(graph_map, subject) do
Map.put(
node,
"@graph",
graph_map[subject]
|> Enum.sort_by(fn {s, _} -> s end)
|> Enum.reduce([], fn {_s, n}, graph_nodes ->
n = Map.delete(n, "usages")
if map_size(n) == 1 and Map.has_key?(n, "@id") do
graph_nodes
else
[n | graph_nodes]
end
end)
|> Enum.reverse()
)
# 3.3)
graph_map =
if graph.name && !get_in(graph_map, ["@default", name]) do
Map.update(graph_map, "@default", %{name => %{"@id" => name}}, fn default_graph ->
Map.put(default_graph, name, %{"@id" => name})
end)
else
node
graph_map
end
# 6.2)
node = Map.delete(node, "usages")
# 3.2 + 3.4)
node_map =
node_map_from_graph(
graph,
Map.get(graph_map, name, %{}),
options.use_native_types,
options.use_rdf_type
)
if map_size(node) == 1 and Map.has_key?(node, "@id") do
result
else
[node | result]
end
Map.put(graph_map, name, node_map)
end)
|> Enum.reverse()
end
# 4)
graph_map =
Enum.reduce(graph_map, %{}, fn {name, graph_object}, graph_map ->
Map.put(graph_map, name, convert_list(graph_object))
end)
# 5+6)
Map.get(graph_map, "@default", %{})
|> Enum.sort_by(fn {subject, _} -> subject end)
|> Enum.reduce([], fn {subject, node}, result ->
# 6.1)
node =
if Map.has_key?(graph_map, subject) do
Map.put(
node,
"@graph",
graph_map[subject]
|> Enum.sort_by(fn {s, _} -> s end)
|> Enum.reduce([], fn {_s, n}, graph_nodes ->
n = Map.delete(n, "usages")
if map_size(n) == 1 and Map.has_key?(n, "@id") do
graph_nodes
else
[n | graph_nodes]
end
end)
|> Enum.reverse()
)
else
node
end
# 6.2)
node = Map.delete(node, "usages")
if map_size(node) == 1 and Map.has_key?(node, "@id") do
result
else
[node | result]
end
end)
|> Enum.reverse()
end
def from_rdf!(rdf_data, options),
do: rdf_data |> RDF.Dataset.new() |> from_rdf!(options)
do: rdf_data |> Dataset.new() |> from_rdf!(options)
# 3.5)
@spec node_map_from_graph(Graph.t(), map, boolean, boolean) :: map
defp node_map_from_graph(graph, current, use_native_types, use_rdf_type) do
Enum.reduce(graph, current, fn {subject, predicate, object}, node_map ->
{subject, predicate, node_object} = {to_string(subject), to_string(predicate), nil}
@ -133,11 +151,7 @@ defmodule JSON.LD.Encoder do
if is_node_object and !use_rdf_type and predicate == @rdf_type do
node =
Map.update(node, "@type", [node_object], fn types ->
if node_object in types do
types
else
types ++ [node_object]
end
if node_object in types, do: types, else: types ++ [node_object]
end)
{node, node_map}
@ -146,25 +160,15 @@ defmodule JSON.LD.Encoder do
node =
Map.update(node, predicate, [value], fn objects ->
if value in objects do
objects
else
objects ++ [value]
end
if value in objects, do: objects, else: objects ++ [value]
end)
node_map =
if is_node_object do
usage = %{
"node" => node,
"property" => predicate,
"value" => value
}
usage = %{"node" => node, "property" => predicate, "value" => value}
Map.update(node_map, node_object, %{"usages" => [usage]}, fn object_node ->
Map.update(object_node, "usages", [usage], fn usages ->
usages ++ [usage]
end)
Map.update(object_node, "usages", [usage], fn usages -> usages ++ [usage] end)
end)
else
node_map
@ -178,8 +182,8 @@ defmodule JSON.LD.Encoder do
|> update_node_usages
end
# This function is necessary because we have no references and must update the
# node member of the usage maps with later enhanced usages
# This function is necessary because we have no references and must update the node member of the usage maps with later enhanced usages
@spec update_node_usages(map) :: map
defp update_node_usages(node_map) do
Enum.reduce(node_map, node_map, fn
{subject, %{"usages" => _usages} = _node}, node_map ->
@ -196,8 +200,8 @@ defmodule JSON.LD.Encoder do
end)
end
# This function is necessary because we have no references and use this
# instead to update the head by path
# This function is necessary because we have no references and use this instead to update the head by path
@spec update_head(map, [String.t()], map, map) :: map
defp update_head(graph_object, path, old, new) do
update_in(graph_object, path, fn objects ->
Enum.map(objects, fn
@ -208,6 +212,7 @@ defmodule JSON.LD.Encoder do
end
# 4)
@spec convert_list(map) :: map
defp convert_list(%{@rdf_nil => nil_node} = graph_object) do
Enum.reduce(
nil_node["usages"],
@ -264,6 +269,7 @@ defmodule JSON.LD.Encoder do
defp convert_list(graph_object), do: graph_object
# 4.3.3)
@spec extract_list(map, [map], [String.t()]) :: {[map], [String.t()], [String.t()], map}
defp extract_list(usage, list \\ [], list_nodes \\ [])
defp extract_list(
@ -314,6 +320,7 @@ defmodule JSON.LD.Encoder do
),
do: {list, list_nodes, [subject, property], head}
@spec rdf_to_object(Statement.object(), boolean) :: map
defp rdf_to_object(%IRI{} = iri, _use_native_types) do
%{"@id" => to_string(iri)}
end
@ -353,7 +360,7 @@ defmodule JSON.LD.Encoder do
end
else
cond do
datatype == RDF.LangString ->
datatype == LangString ->
{converted_value, type, Map.put(result, "@language", Literal.language(literal))}
datatype == XSD.String ->
@ -373,10 +380,13 @@ defmodule JSON.LD.Encoder do
)
end
@spec encode_json(any, [Jason.encode_opt()]) ::
{:ok, String.t()} | {:error, Jason.EncodeError.t() | Exception.t()}
defp encode_json(value, opts) do
Jason.encode(value, opts)
end
@spec encode_json!(any, [Jason.encode_opt()]) :: String.t()
defp encode_json!(value, opts) do
Jason.encode!(value, opts)
end


@ -3,37 +3,30 @@ defmodule JSON.LD.Expansion do
import JSON.LD.{IRIExpansion, Utils}
def expand(input, options \\ %JSON.LD.Options{}) do
with options = JSON.LD.Options.new(options),
active_context = JSON.LD.Context.new(options) do
active_context =
case options.expand_context do
%{"@context" => context} ->
JSON.LD.Context.update(active_context, context)
%{} = context ->
JSON.LD.Context.update(active_context, context)
nil ->
active_context
end
case do_expand(active_context, nil, input, options) do
result = %{"@graph" => graph} when map_size(result) == 1 ->
graph
nil ->
[]
result when not is_list(result) ->
[result]
result ->
result
alias JSON.LD.{Context, Options}
alias JSON.LD.Context.TermDefinition
@spec expand(map, Options.t() | Enum.t()) :: [map]
def expand(input, options \\ %Options{}) do
options = Options.new(options)
active_context = Context.new(options)
active_context =
case options.expand_context do
%{"@context" => context} -> Context.update(active_context, context)
%{} = context -> Context.update(active_context, context)
nil -> active_context
end
case do_expand(active_context, nil, input, options) do
result = %{"@graph" => graph} when map_size(result) == 1 -> graph
nil -> []
result when not is_list(result) -> [result]
result -> result
end
end
@spec do_expand(Context.t(), String.t() | nil, any | nil, Options.t()) :: map | [map] | nil
defp do_expand(active_context, active_property, element, options)
# 1) If element is null, return null.
@ -55,37 +48,30 @@ defmodule JSON.LD.Expansion do
term_def = active_context.term_defs[active_property]
container_mapping = term_def && term_def.container_mapping
element
|> Enum.reduce([], fn item, result ->
Enum.reduce(element, [], fn item, result ->
expanded_item = do_expand(active_context, active_property, item, options)
if (active_property == "@list" or container_mapping == "@list") and
(is_list(expanded_item) or Map.has_key?(expanded_item, "@list")),
do:
raise(JSON.LD.ListOfListsError,
message: "List of lists in #{inspect(element)}"
)
(is_list(expanded_item) or Map.has_key?(expanded_item, "@list")) do
raise JSON.LD.ListOfListsError, message: "List of lists in #{inspect(element)}"
end
case expanded_item do
nil ->
result
list when is_list(list) ->
result ++ list
expanded_item ->
result ++ [expanded_item]
nil -> result
list when is_list(list) -> result ++ list
expanded_item -> result ++ [expanded_item]
end
end)
end
# 4) - 13)
@dialyzer {:nowarn_function, do_expand: 4}
defp do_expand(active_context, active_property, element, options)
when is_map(element) do
# 5)
active_context =
if Map.has_key?(element, "@context") do
JSON.LD.Context.update(active_context, Map.get(element, "@context"), [], options)
Context.update(active_context, Map.get(element, "@context"), [], options)
else
active_context
end
@ -106,20 +92,18 @@ defmodule JSON.LD.Expansion do
# expanded_property is not a keyword
if JSON.LD.keyword?(expanded_property) do
# 7.4.1)
if active_property == "@reverse",
do:
raise(JSON.LD.InvalidReversePropertyMapError,
message:
"An invalid reverse property map has been detected. No keywords apart from @context are allowed in reverse property maps."
)
if active_property == "@reverse" do
raise JSON.LD.InvalidReversePropertyMapError,
message:
"An invalid reverse property map has been detected. No keywords apart from @context are allowed in reverse property maps."
end
# 7.4.2)
if Map.has_key?(result, expanded_property),
do:
raise(JSON.LD.CollidingKeywordsError,
message:
"Two properties which expand to the same keyword have been detected. This might occur if a keyword and an alias thereof are used at the same time."
)
if Map.has_key?(result, expanded_property) do
raise JSON.LD.CollidingKeywordsError,
message:
"Two properties which expand to the same keyword have been detected. This might occur if a keyword and an alias thereof are used at the same time."
end
expanded_value =
case expanded_property do
@ -168,21 +152,21 @@ defmodule JSON.LD.Expansion do
# 7.4.7)
"@language" ->
if is_binary(value),
do: String.downcase(value),
else:
raise(JSON.LD.InvalidLanguageTaggedStringError,
message: "#{inspect(value)} is not a valid language-tag"
)
if is_binary(value) do
String.downcase(value)
else
raise JSON.LD.InvalidLanguageTaggedStringError,
message: "#{inspect(value)} is not a valid language-tag"
end
# 7.4.8)
"@index" ->
if is_binary(value),
do: value,
else:
raise(JSON.LD.InvalidIndexValueError,
message: "#{inspect(value)} is not a valid @index value"
)
if is_binary(value) do
value
else
raise JSON.LD.InvalidIndexValueError,
message: "#{inspect(value)} is not a valid @index value"
end
# 7.4.9)
"@list" ->
@ -193,18 +177,14 @@ defmodule JSON.LD.Expansion do
value = do_expand(active_context, active_property, value, options)
# Spec FIXME: need to be sure that result is a list [from RDF.rb implementation]
value =
if is_list(value),
do: value,
else: [value]
value = if is_list(value), do: value, else: [value]
# If expanded value is a list object, a list of lists error has been detected and processing is aborted.
# Spec FIXME: Also look at each object if result is a list [from RDF.rb implementation]
if Enum.any?(value, fn v -> Map.has_key?(v, "@list") end),
do:
raise(JSON.LD.ListOfListsError,
message: "List of lists in #{inspect(value)}"
)
if Enum.any?(value, fn v -> Map.has_key?(v, "@list") end) do
raise JSON.LD.ListOfListsError,
message: "List of lists in #{inspect(value)}"
end
value
end
@ -215,11 +195,10 @@ defmodule JSON.LD.Expansion do
# 7.4.11)
"@reverse" ->
unless is_map(value),
do:
raise(JSON.LD.InvalidReverseValueError,
message: "#{inspect(value)} is not a valid @reverse value"
)
unless is_map(value) do
raise JSON.LD.InvalidReverseValueError,
message: "#{inspect(value)} is not a valid @reverse value"
end
# 7.4.11.1)
expanded_value = do_expand(active_context, "@reverse", value, options)
@ -227,17 +206,17 @@ defmodule JSON.LD.Expansion do
# 7.4.11.2) If expanded value contains an @reverse member, i.e., properties that are reversed twice, execute for each of its property and item the following steps:
new_result =
if Map.has_key?(expanded_value, "@reverse") do
Enum.reduce(expanded_value["@reverse"], result, fn {property, item},
new_result ->
items =
if is_list(item),
do: item,
else: [item]
Enum.reduce(
expanded_value["@reverse"],
result,
fn {property, item}, new_result ->
items = if is_list(item), do: item, else: [item]
Map.update(new_result, property, items, fn members ->
members ++ items
end)
end)
Map.update(new_result, property, items, fn members ->
members ++ items
end)
end
)
else
result
end
@ -249,12 +228,11 @@ defmodule JSON.LD.Expansion do
Enum.reduce(expanded_value, Map.get(new_result, "@reverse", %{}), fn
{property, items}, reverse_map when property != "@reverse" ->
Enum.each(items, fn item ->
if Map.has_key?(item, "@value") or Map.has_key?(item, "@list"),
do:
raise(JSON.LD.InvalidReversePropertyValueError,
message:
"invalid value for a reverse property in #{inspect(item)}"
)
if Map.has_key?(item, "@value") or Map.has_key?(item, "@list") do
raise JSON.LD.InvalidReversePropertyValueError,
message:
"invalid value for a reverse property in #{inspect(item)}"
end
end)
Map.update(reverse_map, property, items, fn members ->
@ -278,14 +256,9 @@ defmodule JSON.LD.Expansion do
# 7.4.12)
case expanded_value do
nil ->
result
{:skip, new_result} ->
new_result
expanded_value ->
Map.put(result, expanded_property, expanded_value)
nil -> result
{:skip, new_result} -> new_result
expanded_value -> Map.put(result, expanded_property, expanded_value)
end
else
term_def = active_context.term_defs[key]
@ -298,16 +271,10 @@ defmodule JSON.LD.Expansion do
|> Enum.sort_by(fn {language, _} -> language end)
|> Enum.reduce([], fn {language, language_value}, language_map_result ->
language_map_result ++
(if(is_list(language_value),
do: language_value,
else: [language_value]
)
(if(is_list(language_value), do: language_value, else: [language_value])
|> Enum.map(fn
item when is_binary(item) ->
%{
"@value" => item,
"@language" => String.downcase(language)
}
%{"@value" => item, "@language" => String.downcase(language)}
item ->
raise JSON.LD.InvalidLanguageMapValueError,
@ -323,16 +290,11 @@ defmodule JSON.LD.Expansion do
index_map_result ++
(
index_value =
if(is_list(index_value),
do: index_value,
else: [index_value]
)
if is_list(index_value), do: index_value, else: [index_value]
index_value = do_expand(active_context, key, index_value, options)
Enum.map(index_value, fn item ->
Map.put_new(item, "@index", index)
end)
Enum.map(index_value, fn item -> Map.put_new(item, "@index", index) end)
)
end)
@ -351,10 +313,7 @@ defmodule JSON.LD.Expansion do
!(is_map(expanded_value) && Map.has_key?(expanded_value, "@list")) do
%{
"@list" =>
if(is_list(expanded_value),
do: expanded_value,
else: [expanded_value]
)
if(is_list(expanded_value), do: expanded_value, else: [expanded_value])
}
else
expanded_value
@ -367,16 +326,12 @@ defmodule JSON.LD.Expansion do
reverse_map = Map.get(result, "@reverse", %{})
reverse_map =
if(is_list(expanded_value),
do: expanded_value,
else: [expanded_value]
)
if(is_list(expanded_value), do: expanded_value, else: [expanded_value])
|> Enum.reduce(reverse_map, fn item, reverse_map ->
if Map.has_key?(item, "@value") or Map.has_key?(item, "@list"),
do:
raise(JSON.LD.InvalidReversePropertyValueError,
message: "invalid value for a reverse property in #{inspect(item)}"
)
if Map.has_key?(item, "@value") or Map.has_key?(item, "@list") do
raise JSON.LD.InvalidReversePropertyValueError,
message: "invalid value for a reverse property in #{inspect(item)}"
end
Map.update(reverse_map, expanded_property, [item], fn members ->
members ++ [item]
@ -386,9 +341,7 @@ defmodule JSON.LD.Expansion do
Map.put(result, "@reverse", reverse_map)
else
expanded_value =
if is_list(expanded_value),
do: expanded_value,
else: [expanded_value]
if is_list(expanded_value), do: expanded_value, else: [expanded_value]
Map.update(result, expanded_property, expanded_value, fn values ->
expanded_value ++ values
@ -409,12 +362,12 @@ defmodule JSON.LD.Expansion do
# 8)
%{"@value" => value} ->
# 8.1)
with keys = Map.keys(result) do
if Enum.any?(keys, &(&1 not in ~w[@value @language @type @index])) ||
("@language" in keys and "@type" in keys) do
raise JSON.LD.InvalidValueObjectError,
message: "value object with disallowed members"
end
keys = Map.keys(result)
if Enum.any?(keys, &(&1 not in ~w[@value @language @type @index])) ||
("@language" in keys and "@type" in keys) do
raise JSON.LD.InvalidValueObjectError,
message: "value object with disallowed members"
end
cond do
@ -472,11 +425,10 @@ defmodule JSON.LD.Expansion do
result
end
@spec validate_set_or_list_object(map) :: true
defp validate_set_or_list_object(object) when map_size(object) == 1, do: true
defp validate_set_or_list_object(object = %{"@index" => _})
when map_size(object) == 2,
do: true
defp validate_set_or_list_object(%{"@index" => _} = object) when map_size(object) == 2, do: true
defp validate_set_or_list_object(object) do
raise JSON.LD.InvalidSetOrListObjectError,
@ -486,36 +438,36 @@ defmodule JSON.LD.Expansion do
@doc """
Details at <http://json-ld.org/spec/latest/json-ld-api/#value-expansion>
"""
@spec expand_value(Context.t(), String.t(), any) :: map
def expand_value(active_context, active_property, value) do
with term_def =
Map.get(active_context.term_defs, active_property, %JSON.LD.Context.TermDefinition{}) do
cond do
term_def.type_mapping == "@id" ->
%{"@id" => expand_iri(value, active_context, true, false)}
term_def = Map.get(active_context.term_defs, active_property, %TermDefinition{})
term_def.type_mapping == "@vocab" ->
%{"@id" => expand_iri(value, active_context, true, true)}
cond do
term_def.type_mapping == "@id" ->
%{"@id" => expand_iri(value, active_context, true, false)}
type_mapping = term_def.type_mapping ->
%{"@value" => value, "@type" => type_mapping}
term_def.type_mapping == "@vocab" ->
%{"@id" => expand_iri(value, active_context, true, true)}
is_binary(value) ->
language_mapping = term_def.language_mapping
type_mapping = term_def.type_mapping ->
%{"@value" => value, "@type" => type_mapping}
cond do
language_mapping ->
%{"@value" => value, "@language" => language_mapping}
is_binary(value) ->
language_mapping = term_def.language_mapping
language_mapping == false && active_context.default_language ->
%{"@value" => value, "@language" => active_context.default_language}
cond do
language_mapping ->
%{"@value" => value, "@language" => language_mapping}
true ->
%{"@value" => value}
end
language_mapping == false && active_context.default_language ->
%{"@value" => value, "@language" => active_context.default_language}
true ->
%{"@value" => value}
end
true ->
%{"@value" => value}
end
true ->
%{"@value" => value}
end
end
end


@ -2,61 +2,65 @@ defmodule JSON.LD.Flattening do
@moduledoc nil
import JSON.LD.{NodeIdentifierMap, Utils}
alias JSON.LD.NodeIdentifierMap
def flatten(input, context \\ nil, options \\ %JSON.LD.Options{}) do
with options = JSON.LD.Options.new(options),
expanded = JSON.LD.expand(input, options),
node_map = node_map(expanded) do
alias JSON.LD.{NodeIdentifierMap, Options}
@dialyzer {:nowarn_function, flatten: 3}
@spec flatten(map | [map], map | nil, Options.t() | Enum.t()) :: [map]
def flatten(input, context \\ nil, options \\ %Options{}) do
options = Options.new(options)
expanded = JSON.LD.expand(input, options)
node_map = node_map(expanded)
default_graph =
Enum.reduce(node_map, node_map["@default"], fn
{"@default", _}, default_graph ->
default_graph
{graph_name, graph}, default_graph ->
entry =
if Map.has_key?(default_graph, graph_name) do
default_graph[graph_name]
else
%{"@id" => graph_name}
end
graph_entry =
graph
|> Stream.reject(fn {_, node} ->
Map.has_key?(node, "@id") and map_size(node) == 1
end)
|> Enum.sort_by(fn {id, _} -> id end)
# TODO: Spec fixme: Spec doesn't handle the case, when a "@graph" member already exists
|> Enum.reduce(Map.get(entry, "@graph", []), fn {_, node}, graph_entry ->
[node | graph_entry]
end)
|> Enum.reverse()
Map.put(default_graph, graph_name, Map.put(entry, "@graph", graph_entry))
end)
flattened =
default_graph
|> Enum.sort_by(fn {id, _} -> id end)
|> Enum.reduce([], fn {_, node}, flattened ->
if not (Enum.count(node) == 1 and Map.has_key?(node, "@id")) do
[node | flattened]
else
flattened
end
end)
|> Enum.reverse()
# TODO: Spec fixme: !Enum.empty?(flattened) is not in the spec, but in other implementations (Ruby, Java, Go, ...)
if context && !Enum.empty?(flattened) do
JSON.LD.compact(flattened, context, options)
else
flattened
end
end
@spec node_map([map], pid | nil) :: map
def node_map(input, node_id_map \\ nil)
def node_map(input, nil) do
@ -78,6 +82,15 @@ defmodule JSON.LD.Flattening do
Details at <https://www.w3.org/TR/json-ld-api/#node-map-generation>
"""
@spec generate_node_map(
[map] | map,
map,
pid,
String.t(),
String.t() | nil,
String.t() | nil,
pid | nil
) :: map
def generate_node_map(
element,
node_map,
@ -154,13 +167,8 @@ defmodule JSON.LD.Flattening do
if is_nil(list) do
if node do
update_in(node_map, [active_graph, active_subject, active_property], fn
nil ->
[element]
items ->
unless element in items,
do: items ++ [element],
else: items
nil -> [element]
items -> unless element in items, do: items ++ [element], else: items
end)
else
node_map
@@ -208,11 +216,7 @@ defmodule JSON.LD.Flattening do
id =
if id do
if blank_node_id?(id) do
generate_blank_node_id(node_id_map, id)
else
id
end
if blank_node_id?(id), do: generate_blank_node_id(node_id_map, id), else: id
# 6.2)
else
@@ -241,9 +245,7 @@ defmodule JSON.LD.Flattening do
[active_subject]
items ->
unless active_subject in items,
do: items ++ [active_subject],
else: items
unless active_subject in items, do: items ++ [active_subject], else: items
end)
else
node_map
@@ -260,9 +262,7 @@ defmodule JSON.LD.Flattening do
[reference]
items ->
unless reference in items,
do: items ++ [reference],
else: items
unless reference in items, do: items ++ [reference], else: items
end)
# 6.6.3) TODO: Spec fixme: specs says to add ELEMENT to @list member, should be REFERENCE
@@ -281,13 +281,8 @@ defmodule JSON.LD.Flattening do
node_map =
Enum.reduce(element["@type"], node_map, fn type, node_map ->
update_in(node_map, [active_graph, id, "@type"], fn
nil ->
[type]
items ->
unless type in items,
do: items ++ [type],
else: items
nil -> [type]
items -> unless type in items, do: items ++ [type], else: items
end)
end)
@@ -366,9 +361,7 @@ defmodule JSON.LD.Flattening do
node_map =
unless Map.has_key?(node_map[active_graph][id], property) do
update_in(node_map, [active_graph, id], fn node ->
Map.put(node, property, [])
end)
update_in(node_map, [active_graph, id], fn node -> Map.put(node, property, []) end)
else
node_map
end
@@ -378,6 +371,7 @@ defmodule JSON.LD.Flattening do
end
end
@spec deep_compare(map | [map], map | [map]) :: boolean
defp deep_compare(v1, v2) when is_map(v1) and is_map(v2) do
Enum.count(v1) == Enum.count(v2) &&
Enum.all?(v1, fn {k, v} ->
@@ -392,18 +386,22 @@ defmodule JSON.LD.Flattening do
defp deep_compare(v, v), do: true
defp deep_compare(_, _), do: false
@spec new_list :: Agent.on_start()
defp new_list do
Agent.start_link(fn -> %{"@list" => []} end)
end
@spec terminate_list(pid) :: :ok
defp terminate_list(pid) do
Agent.stop(pid)
:ok = Agent.stop(pid)
end
@spec get_list(pid) :: map
defp get_list(pid) do
Agent.get(pid, fn list_node -> list_node end)
end
@spec append_to_list(pid, map) :: :ok
defp append_to_list(pid, element) do
Agent.update(pid, fn list_node ->
Map.update(list_node, "@list", [element], fn list -> list ++ [element] end)
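
Taken together, these private helpers keep a single %{"@list" => [...]} node in an Agent while a list object is being assembled. A minimal sketch of the same pattern, using Agent directly (since the helpers are private) and a made-up element:

{:ok, pid} = Agent.start_link(fn -> %{"@list" => []} end)

Agent.update(pid, fn list_node ->
  Map.update(list_node, "@list", [%{"@value" => 1}], fn list -> list ++ [%{"@value" => 1}] end)
end)

Agent.get(pid, fn list_node -> list_node end)
# => %{"@list" => [%{"@value" => 1}]}

:ok = Agent.stop(pid)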

View file

@@ -1,12 +1,16 @@
defmodule JSON.LD.IRIExpansion do
import JSON.LD.Utils
alias JSON.LD.Context
# to allow this to be used in function guard clauses, we redefine this here
@keywords JSON.LD.keywords()
@doc """
see http://json-ld.org/spec/latest/json-ld-api/#iri-expansion
"""
@spec expand_iri(String.t(), Context.t(), boolean, boolean, map | nil, map | nil) ::
{String.t(), Context.t(), map} | String.t()
def expand_iri(
value,
active_context,
@@ -32,7 +36,7 @@ defmodule JSON.LD.IRIExpansion do
if local_context && local_context[value] && defined[value] != true do
local_def = local_context[value]
JSON.LD.Context.create_term_definition(
Context.create_term_definition(
active_context,
local_context,
value,
@@ -59,7 +63,7 @@ defmodule JSON.LD.IRIExpansion do
if local_context && local_context[prefix] && defined[prefix] != true do
local_def = local_context[prefix]
JSON.LD.Context.create_term_definition(
Context.create_term_definition(
active_context,
local_context,
prefix,
@@ -93,12 +97,12 @@ defmodule JSON.LD.IRIExpansion do
# 6) Otherwise, if document relative is true, set value to the result of resolving value against the base IRI. Only the basic algorithm in section 5.2 of [RFC3986] is used; neither Syntax-Based Normalization nor Scheme-Based Normalization are performed. Characters additionally allowed in IRI references are treated in the same way that unreserved characters are treated in URI references, per section 6.5 of [RFC3987].
doc_relative ->
{absolute_iri(value, JSON.LD.Context.base(active_context)), active_context, defined}
{absolute_iri(value, Context.base(active_context)), active_context, defined}
# TODO: RDF.rb's implementation differs from the spec here, by checking if base_iri is actually present in the previous clause and adding the following additional clause. Another Spec error?
# if local_context && RDF::URI(value).relative?
# # If local context is not null and value is not an absolute IRI, an invalid IRI mapping error has been detected and processing is aborted.
# raise JSON.LD.InvalidIRIMappingError, message: "not an absolute IRI: #{value}"
# if local_context && RDF::URI(value).relative?
# # If local context is not null and value is not an absolute IRI, an invalid IRI mapping error has been detected and processing is aborted.
# raise JSON.LD.InvalidIRIMappingError, message: "not an absolute IRI: #{value}"
# 7) Return value as is.
true ->
{value, active_context, defined}
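
As a rough illustration of what expand_iri is expected to produce for a compact IRI (the context is made up, and the trailing local_context/defined arguments are assumed to default to nil):

ctx = JSON.LD.context(%{"foaf" => "http://xmlns.com/foaf/0.1/"})

# The "foaf" prefix resolves via its term definition, so the compact IRI should
# expand to "http://xmlns.com/foaf/0.1/name" (returned either bare or inside an
# {iri, active_context, defined} tuple, per the @spec above); vocab is passed as
# true here, as the expansion algorithm does for property positions.
JSON.LD.IRIExpansion.expand_iri("foaf:name", ctx, false, true)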

View file

@@ -5,12 +5,14 @@ defmodule JSON.LD.NodeIdentifierMap do
# Client API
@spec start_link(keyword) :: GenServer.on_start()
def start_link(opts \\ []) do
GenServer.start_link(__MODULE__, :ok, opts)
end
@spec stop(GenServer.server(), atom, timeout) :: :ok
def stop(pid, reason \\ :normal, timeout \\ :infinity) do
GenServer.stop(pid, reason, timeout)
:ok = GenServer.stop(pid, reason, timeout)
end
@doc """
@@ -18,32 +20,28 @@ defmodule JSON.LD.NodeIdentifierMap do
Details at <https://www.w3.org/TR/json-ld-api/#generate-blank-node-identifier>
"""
@spec generate_blank_node_id(GenServer.server(), String.t() | nil) :: String.t()
def generate_blank_node_id(pid, identifier \\ nil) do
GenServer.call(pid, {:generate_id, identifier})
end
# Server Callbacks
@spec init(:ok) :: {:ok, map}
def init(:ok) do
{:ok, %{map: %{}, counter: 0}}
end
@spec handle_call({:generate_id, String.t() | nil}, GenServer.from(), map) ::
{:reply, String.t(), map}
def handle_call({:generate_id, identifier}, _, %{map: map, counter: counter} = state) do
if identifier && map[identifier] do
{:reply, map[identifier], state}
else
blank_node_id = "_:b#{counter}"
      {:reply, blank_node_id,
       %{
         counter: counter + 1,
         map:
           if identifier do
             Map.put(map, identifier, blank_node_id)
           else
             map
           end
       }}

      map = if identifier, do: Map.put(map, identifier, blank_node_id), else: map
      {:reply, blank_node_id, %{counter: counter + 1, map: map}}
end
end
end
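
The renaming behaviour is easiest to see in a quick sketch (identifiers made up):

{:ok, pid} = JSON.LD.NodeIdentifierMap.start_link()

JSON.LD.NodeIdentifierMap.generate_blank_node_id(pid)           # => "_:b0"
JSON.LD.NodeIdentifierMap.generate_blank_node_id(pid, "_:old")  # => "_:b1"
JSON.LD.NodeIdentifierMap.generate_blank_node_id(pid, "_:old")  # => "_:b1" (cached for the same identifier)

JSON.LD.NodeIdentifierMap.stop(pid)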

View file

@@ -5,6 +5,17 @@ defmodule JSON.LD.Options do
as specified at <https://www.w3.org/TR/json-ld-api/#the-jsonldoptions-type>
"""
@type t :: %__MODULE__{
base: String.t() | nil,
compact_arrays: boolean,
document_loader: nil,
expand_context: map | nil,
produce_generalized_rdf: boolean,
use_rdf_type: boolean,
use_native_types: boolean,
processing_mode: String.t()
}
defstruct base: nil,
compact_arrays: true,
document_loader: nil,
@@ -14,7 +25,10 @@ defmodule JSON.LD.Options do
use_native_types: false,
processing_mode: "json-ld-1.0"
def new(), do: %JSON.LD.Options{}
def new(%JSON.LD.Options{} = options), do: options
def new(options), do: struct(JSON.LD.Options, options)
@spec new :: t
def new, do: %__MODULE__{}
@spec new(t | Enum.t()) :: t
def new(%__MODULE__{} = options), do: options
def new(options), do: struct(__MODULE__, options)
end
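
Since new/1 accepts either an existing struct or any enumerable of option pairs, overriding a single default stays short (a small sketch):

JSON.LD.Options.new(compact_arrays: false).compact_arrays
# => false

JSON.LD.Options.new(%JSON.LD.Options{processing_mode: "json-ld-1.0"})
# => the given struct is passed through unchanged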

View file

@@ -11,24 +11,28 @@ defmodule JSON.LD.Utils do
Characters additionally allowed in IRI references are treated in the same way that unreserved
characters are treated in URI references, per [section 6.5 of RFC3987](http://tools.ietf.org/html/rfc3987#section-6.5)
"""
@spec absolute_iri(String.t(), String.t() | nil) :: IRI.coercible() | nil
def absolute_iri(value, base_iri)
def absolute_iri(value, nil),
do: value
def absolute_iri(value, base_iri),
do: value |> RDF.IRI.absolute(base_iri) |> to_string
do: value |> IRI.absolute(base_iri) |> to_string
@spec relative_iri?(String.t()) :: boolean
def relative_iri?(value),
do: not (JSON.LD.keyword?(value) or IRI.absolute?(value) or blank_node_id?(value))
@spec compact_iri_parts(String.t(), boolean) :: [String.t()] | nil
def compact_iri_parts(compact_iri, exclude_bnode \\ true) do
with [prefix, suffix] <- String.split(compact_iri, ":", parts: 2) do
if not String.starts_with?(suffix, "//") and
not (exclude_bnode and prefix == "_"),
do: [prefix, suffix]
else
_ -> nil
case String.split(compact_iri, ":", parts: 2) do
[prefix, suffix] ->
if not String.starts_with?(suffix, "//") and not (exclude_bnode and prefix == "_"),
do: [prefix, suffix]
_ ->
nil
end
end
@@ -42,20 +46,23 @@ defmodule JSON.LD.Utils do
see <https://www.w3.org/TR/json-ld-api/#dfn-blank-node-identifier>
"""
@spec blank_node_id?(String.t()) :: boolean
def blank_node_id?("_:" <> _), do: true
def blank_node_id?(_), do: false
def scalar?(value)
when is_binary(value) or is_number(value) or
is_boolean(value),
do: true
@spec scalar?(any) :: boolean
def scalar?(value) when is_binary(value) or is_number(value) or is_boolean(value), do: true
def scalar?(_), do: false
@spec list?(map | nil) :: boolean
def list?(%{"@list" => _}), do: true
def list?(_), do: false
@spec index?(map | nil) :: boolean
def index?(%{"@index" => _}), do: true
def index?(_), do: false
@spec value?(map | nil) :: boolean
def value?(%{"@value" => _}), do: true
def value?(_), do: false
end
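
A few concrete calls make these helpers easier to scan; the values are illustrative:

JSON.LD.Utils.compact_iri_parts("foaf:name")       # => ["foaf", "name"]
JSON.LD.Utils.compact_iri_parts("_:b0")            # => nil (blank node prefixes excluded by default)
JSON.LD.Utils.compact_iri_parts("http://ex.org/")  # => nil (suffix starting with "//" is rejected)
JSON.LD.Utils.blank_node_id?("_:b0")               # => true
JSON.LD.Utils.scalar?(42)                          # => true
JSON.LD.Utils.list?(%{"@list" => []})              # => true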

View file

@@ -3,13 +3,13 @@ defmodule JSON.LD do
import RDF.Sigils
alias JSON.LD.{Compaction, Context, Expansion, Flattening, Options}
@id ~I<http://www.w3.org/ns/formats/JSON-LD>
@name :jsonld
@extension "jsonld"
@media_type "application/ld+json"
def options, do: JSON.LD.Options.new()
@keywords ~w[
@base
@container
@@ -28,16 +28,21 @@ defmodule JSON.LD do
:
]
@spec options :: Options.t()
def options, do: Options.new()
@doc """
The set of all JSON-LD keywords.
see <https://www.w3.org/TR/json-ld/#syntax-tokens-and-keywords>
"""
@spec keywords :: [String.t()]
def keywords, do: @keywords
@doc """
Returns `true` if the given value is a JSON-LD keyword.
"""
@spec keyword?(String.t()) :: boolean
def keyword?(value) when is_binary(value) and value in @keywords, do: true
def keyword?(_value), do: false
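
For example, given the keyword list above:

JSON.LD.keyword?("@context")  # => true
JSON.LD.keyword?("@foo")      # => false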
@@ -52,8 +57,9 @@ defmodule JSON.LD do
Details at <http://json-ld.org/spec/latest/json-ld-api/#expansion-algorithm>
"""
defdelegate expand(input, options \\ %JSON.LD.Options{}),
to: JSON.LD.Expansion
@spec expand(map, Options.t() | Enum.t()) :: [map]
defdelegate expand(input, options \\ %Options{}),
to: Expansion
@doc """
Compacts the given input according to the steps in the JSON-LD Compaction Algorithm.
@@ -68,8 +74,9 @@ defmodule JSON.LD do
Details at <https://www.w3.org/TR/json-ld-api/#compaction-algorithms>
"""
defdelegate compact(input, context, options \\ %JSON.LD.Options{}),
to: JSON.LD.Compaction
@spec compact(map | [map], map | nil, Options.t() | Enum.t()) :: map
defdelegate compact(input, context, options \\ %Options{}),
to: Compaction
@doc """
Flattens the given input according to the steps in the JSON-LD Flattening Algorithm.
@@ -83,8 +90,9 @@ defmodule JSON.LD do
Details at <https://www.w3.org/TR/json-ld-api/#flattening-algorithms>
"""
defdelegate flatten(input, context \\ nil, options \\ %JSON.LD.Options{}),
to: JSON.LD.Flattening
@spec flatten(map | [map], map | nil, Options.t() | Enum.t()) :: [map]
defdelegate flatten(input, context \\ nil, options \\ %Options{}),
to: Flattening
@doc """
Generator function for `JSON.LD.Context`s.
@@ -92,17 +100,19 @@ defmodule JSON.LD do
You can either pass a map with a `"@context"` key having the JSON-LD context
object as its value, or the JSON-LD context object directly.
"""
def context(args, opts \\ %JSON.LD.Options{})
@spec context(map, Options.t()) :: Context.t()
def context(args, opts \\ %Options{})
def context(%{"@context" => _} = object, options),
do: JSON.LD.Context.create(object, options)
do: Context.create(object, options)
def context(context, options),
do: JSON.LD.Context.create(%{"@context" => context}, options)
do: Context.create(%{"@context" => context}, options)
@doc """
Generator function for JSON-LD node maps.
"""
def node_map(input, node_id_map \\ nil),
do: JSON.LD.Flattening.node_map(input, node_id_map)
@spec node_map([map], pid | nil) :: map
defdelegate node_map(input, node_id_map \\ nil),
to: Flattening
end
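
Putting the delegates together, a rough end-to-end sketch; the document and the commented results are illustrative:

doc = %{
  "@context" => %{"name" => "http://xmlns.com/foaf/0.1/name"},
  "@id" => "http://example.org/#me",
  "name" => "Alice"
}

JSON.LD.expand(doc)
# => [%{"@id" => "http://example.org/#me",
#       "http://xmlns.com/foaf/0.1/name" => [%{"@value" => "Alice"}]}]

doc |> JSON.LD.expand() |> JSON.LD.compact(%{"@context" => %{"name" => "http://xmlns.com/foaf/0.1/name"}})
# => %{"@context" => %{"name" => "http://xmlns.com/foaf/0.1/name"},
#      "@id" => "http://example.org/#me", "name" => "Alice"}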