Add RDF.Dataset.prefixes/1

parent 29a860d969
commit 15002a0bbb

6 changed files with 52 additions and 24 deletions

@@ -9,6 +9,7 @@ This project adheres to [Semantic Versioning](http://semver.org/) and
 ### Added
 
+- `RDF.Dataset.prefixes/1` for getting an aggregated `RDF.PrefixMap` over all graphs
 - `RDF.PrefixMap.put/3` for adding a prefix mapping and overwriting an existing one
 
 ### Changed
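
A quick usage sketch of the new function (hedged: the namespace IRIs and graph name below are placeholders, not taken from this commit):

```elixir
dataset =
  RDF.Dataset.new()
  |> RDF.Dataset.add(RDF.Graph.new(prefixes: [ex: "http://example.com/ns#"]))
  |> RDF.Dataset.add(
    RDF.Graph.new(name: RDF.iri("http://example.com/graph"), prefixes: [foaf: "http://xmlns.com/foaf/0.1/"])
  )

# Aggregates the prefix maps of all graphs into a single RDF.PrefixMap
# containing both the ex: and the foaf: mapping.
RDF.Dataset.prefixes(dataset)
```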

@@ -14,6 +14,12 @@ defmodule RDF.BlankNode.Increment do
   alias RDF.BlankNode
 
+  @type state :: %{
+          optional(:prefix) => String.t(),
+          map: map,
+          counter: pos_integer
+        }
+
   @impl BlankNode.Generator.Algorithm
   def init(%{prefix: prefix} = opts) do
     opts
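
For reference, a value satisfying the new `state` type could look like the following; this is purely illustrative, since the commit only adds the typespec:

```elixir
# :prefix is an optional key; :map and :counter are required by the type.
%{prefix: "b", map: %{}, counter: 1}
```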

@@ -17,7 +17,7 @@ defmodule RDF.Dataset do
   @behaviour Access
 
-  alias RDF.{Graph, Description, IRI, Statement, PropertyMap}
+  alias RDF.{Graph, Description, IRI, Statement, PrefixMap, PropertyMap}
   import RDF.Statement, only: [coerce_subject: 1, coerce_graph_name: 1]
   import RDF.Utils
 
@@ -853,6 +853,22 @@ defmodule RDF.Dataset do
   def equal?(_, _), do: false
 
+  @doc """
+  Returns the aggregated prefixes of all graphs of `dataset` as a `RDF.PrefixMap`.
+  """
+  @spec prefixes(t) :: PrefixMap.t() | nil
+  def prefixes(%__MODULE__{} = dataset) do
+    dataset
+    |> RDF.Dataset.graphs()
+    |> Enum.reduce(RDF.PrefixMap.new(), fn graph, prefixes ->
+      if graph.prefixes do
+        RDF.PrefixMap.merge!(prefixes, graph.prefixes, :ignore)
+      else
+        prefixes
+      end
+    end)
+  end
+
   defp clear_metadata(%__MODULE__{} = dataset) do
     %__MODULE__{
       dataset
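
Note on the merge strategy: because the reduction uses `:ignore`, a prefix mapping already in the accumulator wins over a conflicting mapping from a graph encountered later. A minimal sketch of that behaviour (placeholder namespaces, not from this commit):

```elixir
first = RDF.PrefixMap.new(foo: "http://example.com/ns1#")
second = RDF.PrefixMap.new(foo: "http://example.com/ns2#")

# :ignore keeps the existing mapping, so foo: stays bound to http://example.com/ns1#
RDF.PrefixMap.merge!(first, second, :ignore)
```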

@@ -4,7 +4,7 @@ defmodule RDF.Turtle.Encoder do
   use RDF.Serialization.Encoder
 
   alias RDF.Turtle.Encoder.State
-  alias RDF.{BlankNode, Dataset, Description, Graph, IRI, XSD, Literal, LangString}
+  alias RDF.{BlankNode, Dataset, Description, Graph, IRI, XSD, Literal, LangString, PrefixMap}
 
   @document_structure [
     :base,
@@ -36,14 +36,19 @@ defmodule RDF.Turtle.Encoder do
   @ordered_properties MapSet.new(@predicate_order)
 
   @impl RDF.Serialization.Encoder
-  @callback encode(Graph.t() | Dataset.t(), keyword | map) :: {:ok, String.t()} | {:error, any}
+  @spec encode(Graph.t() | Dataset.t(), keyword) :: {:ok, String.t()} | {:error, any}
   def encode(data, opts \\ []) do
-    with base =
-           Keyword.get(opts, :base, Keyword.get(opts, :base_iri))
-           |> base_iri(data)
-           |> init_base_iri(),
-         prefixes = Keyword.get(opts, :prefixes) |> prefixes(data) |> init_prefixes(),
-         {:ok, state} = State.start_link(data, base, prefixes) do
+    base =
+      Keyword.get(opts, :base, Keyword.get(opts, :base_iri))
+      |> base_iri(data)
+      |> init_base_iri()
+
+    prefixes =
+      Keyword.get(opts, :prefixes)
+      |> prefixes(data)
+      |> init_prefixes()
+
+    with {:ok, state} = State.start_link(data, base, prefixes) do
       try do
        State.preprocess(state)
 
@@ -74,7 +79,7 @@ defmodule RDF.Turtle.Encoder do
        raise "unknown Turtle document element: #{inspect(element)}"
     end
 
-  defp base_iri(nil, %RDF.Graph{base_iri: base_iri}) when not is_nil(base_iri), do: base_iri
+  defp base_iri(nil, %Graph{base_iri: base_iri}) when not is_nil(base_iri), do: base_iri
   defp base_iri(nil, _), do: RDF.default_base_iri()
   defp base_iri(base_iri, _), do: IRI.coerce_base(base_iri)
 
@@ -91,19 +96,10 @@ defmodule RDF.Turtle.Encoder do
     end
   end
 
-  defp prefixes(nil, %RDF.Graph{prefixes: prefixes}) when not is_nil(prefixes), do: prefixes
+  defp prefixes(nil, %Graph{prefixes: prefixes}) when not is_nil(prefixes), do: prefixes
 
-  defp prefixes(nil, %RDF.Dataset{} = dataset) do
-    prefixes =
-      dataset
-      |> RDF.Dataset.graphs()
-      |> Enum.reduce(RDF.PrefixMap.new(), fn graph, prefixes ->
-        if graph.prefixes do
-          RDF.PrefixMap.merge!(prefixes, graph.prefixes, :ignore)
-        else
-          prefixes
-        end
-      end)
-
+  defp prefixes(nil, %Dataset{} = dataset) do
+    prefixes = Dataset.prefixes(dataset)
+
     if Enum.empty?(prefixes) do
       RDF.default_prefixes()
 
@@ -113,7 +109,7 @@ defmodule RDF.Turtle.Encoder do
   end
 
   defp prefixes(nil, _), do: RDF.default_prefixes()
-  defp prefixes(prefixes, _), do: RDF.PrefixMap.new(prefixes)
+  defp prefixes(prefixes, _), do: PrefixMap.new(prefixes)
 
   defp init_prefixes(prefixes) do
     Enum.reduce(prefixes, %{}, fn {prefix, iri}, reverse ->
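
With this change, serializing a dataset to Turtle without an explicit `:prefixes` option falls back to the prefixes aggregated by `RDF.Dataset.prefixes/1`. A hedged sketch, assuming the usual `RDF.Turtle.write_string/2` entry point and a placeholder namespace:

```elixir
dataset =
  RDF.Dataset.new()
  |> RDF.Dataset.add(RDF.Graph.new(prefixes: [ex: "http://example.com/ns#"]))

# No :prefixes option given, so the encoder should emit an @prefix
# declaration for ex: taken from the dataset's graphs.
{:ok, turtle} = RDF.Turtle.write_string(dataset)
```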

@@ -11,7 +11,8 @@ defmodule RDF.Test.Case do
   using do
     quote do
-      alias RDF.{Dataset, Graph, Description, IRI, XSD, PropertyMap}
+      alias RDF.{Dataset, Graph, Description, IRI, XSD, PrefixMap, PropertyMap}
+      alias RDF.NS.{RDFS, OWL}
       alias unquote(__MODULE__).{EX, FOAF}
 
       import RDF, only: [iri: 1, literal: 1, bnode: 1]

@@ -1757,6 +1757,14 @@ defmodule RDF.DatasetTest do
     )
   end
 
+  test "prefixes/1" do
+    assert Dataset.new()
+           |> Dataset.add(Graph.new(prefixes: [ex: EX, foo: RDFS]))
+           |> Dataset.add(Graph.new(name: EX.Graph, prefixes: [ex: EX, foo: OWL]))
+           |> Dataset.prefixes() ==
+             PrefixMap.new(ex: EX, foo: RDFS)
+  end
+
   describe "Enumerable protocol" do
     test "Enum.count" do
       assert Enum.count(Dataset.new(name: EX.foo())) == 0