sync with develop
parent eb9744cade
commit 1f29ecdcd7
6 changed files with 99 additions and 50 deletions
@@ -85,6 +85,48 @@ def generate(user, opts \\ []) do
     :ok
   end

+  def generate_power_intervals(opts \\ []) do
+    count = Keyword.get(opts, :count, 20)
+    power = Keyword.get(opts, :power, 2)
+    IO.puts("Generating #{count} intervals for a power #{power} series...")
+    counts = Enum.map(1..count, fn n -> :math.pow(n, power) end)
+    sum = Enum.sum(counts)
+
+    densities =
+      Enum.map(counts, fn c ->
+        c / sum
+      end)
+
+    densities
+    |> Enum.reduce(0, fn density, acc ->
+      if acc == 0 do
+        [{0, density}]
+      else
+        [{_, lower} | _] = acc
+        [{lower, lower + density} | acc]
+      end
+    end)
+    |> Enum.reverse()
+  end
+
+  def generate_tagged_activities(opts \\ []) do
+    tag_count = Keyword.get(opts, :tag_count, 20)
+    users = Keyword.get(opts, :users, Repo.all(Pleroma.User))
+    activity_count = Keyword.get(opts, :count, 200_000)
+
+    intervals = generate_power_intervals(count: tag_count)
+
+    IO.puts(
+      "Generating #{activity_count} activities using #{tag_count} different tags of format `tag_n`, starting at tag_0"
+    )
+
+    Enum.each(1..activity_count, fn _ ->
+      random = :rand.uniform()
+      i = Enum.find_index(intervals, fn {lower, upper} -> lower <= random && upper > random end)
+      CommonAPI.post(Enum.random(users), %{"status" => "a post with the tag #tag_#{i}"})
+    end)
+  end
+
   defp generate_long_thread(visibility, user, friends, non_friends, _opts) do
     group =
       if visibility == "public",

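For context on the added generator: generate_power_intervals/1 weights tag_n by n^power, normalizes those weights into densities, and folds them into cumulative {lower, upper} intervals, so a single :rand.uniform() draw in generate_tagged_activities picks a tag with power-law frequency. Below is a standalone sketch of that arithmetic; the module name and the case-based accumulator are mine, not part of the diff.

# Standalone sketch of the interval construction above (illustration only;
# the real function lives in the benchmark module shown in the diff).
defmodule PowerIntervalsSketch do
  def intervals(count, power) do
    counts = Enum.map(1..count, fn n -> :math.pow(n, power) end)
    sum = Enum.sum(counts)

    counts
    |> Enum.map(&(&1 / sum))
    |> Enum.reduce([], fn density, acc ->
      case acc do
        [] -> [{0, density}]
        [{_, lower} | _] -> [{lower, lower + density} | acc]
      end
    end)
    |> Enum.reverse()
  end
end

# For count: 3, power: 2 the weights are [1.0, 4.0, 9.0] with sum 14.0, so the
# intervals are roughly [{0, 0.071}, {0.071, 0.357}, {0.357, 1.0}]: a uniform
# :rand.uniform() draw lands in tag_2's interval about 64% of the time.
IO.inspect(PowerIntervalsSketch.intervals(3, 2))
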
@@ -1,3 +1,14 @@
 defmodule Pleroma.LoadTesting.Helper do
+  alias Ecto.Adapters.SQL
+  alias Pleroma.Repo
+
   def to_sec(microseconds), do: microseconds / 1_000_000
+
+  def clean_tables do
+    IO.puts("Deleting old data...\n")
+    SQL.query!(Repo, "TRUNCATE users CASCADE;")
+    SQL.query!(Repo, "TRUNCATE activities CASCADE;")
+    SQL.query!(Repo, "TRUNCATE objects CASCADE;")
+    SQL.query!(Repo, "TRUNCATE oban_jobs CASCADE;")
+  end
 end

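The helper now centralizes the two utilities the benchmark tasks share: to_sec/1 converts the microseconds returned by :timer.tc into seconds, and clean_tables/0 truncates the benchmark tables. A minimal sketch of the to_sec/:timer.tc pairing follows; the Process.sleep call is a stand-in for real benchmark work.

defmodule ToSecSketch do
  def to_sec(microseconds), do: microseconds / 1_000_000

  def timed_example do
    # :timer.tc/1 returns {elapsed_microseconds, result}
    {time, _result} = :timer.tc(fn -> Process.sleep(250) end)
    IO.puts("Work took #{to_sec(time)} sec.")
  end
end

ToSecSketch.timed_example()
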
@@ -20,31 +20,31 @@ defmodule Pleroma.LoadTesting.Users do
   def generate(opts \\ []) do
     opts = Keyword.merge(@defaults, opts)

-    IO.puts("Starting generating #{opts[:users]} users...")
-
-    {time, _} = :timer.tc(fn -> generate_users(opts[:users]) end)
-
-    IO.puts("Generating users take #{to_sec(time)} sec.\n")
+    generate_users(opts[:users])

     main_user =
       Repo.one(from(u in User, where: u.local == true, order_by: fragment("RANDOM()"), limit: 1))

-    IO.puts("Starting making friends for #{opts[:friends]} users...")
-    {time, _} = :timer.tc(fn -> make_friends(main_user, opts[:friends]) end)
-
-    IO.puts("Making friends take #{to_sec(time)} sec.\n")
+    make_friends(main_user, opts[:friends])

     Repo.get(User, main_user.id)
   end

-  defp generate_users(max) do
-    Task.async_stream(
-      1..max,
-      &generate_user(&1),
-      max_concurrency: @max_concurrency,
-      timeout: 30_000
-    )
-    |> Stream.run()
+  def generate_users(max) do
+    IO.puts("Starting generating #{opts[:users]} users...")
+
+    {time, _} =
+      :timer.tc(fn ->
+        Task.async_stream(
+          1..max,
+          &generate_user(&1),
+          max_concurrency: @max_concurrency,
+          timeout: 30_000
+        )
+        |> Stream.run()
+      end)
+
+    IO.puts("Generating users take #{to_sec(time)} sec.\n")
   end

   defp generate_user(i) do

@@ -86,18 +86,25 @@ defp user_urls(%{local: false} = user) do
     Map.merge(user, urls)
   end

-  defp make_friends(main_user, max) when is_integer(max) do
-    number_of_users =
-      (max / 2)
-      |> Kernel.trunc()
+  def make_friends(main_user, max) when is_integer(max) do
+    IO.puts("Starting making friends for #{opts[:friends]} users...")

-    main_user
-    |> get_users(%{limit: number_of_users, local: :local})
-    |> run_stream(main_user)
+    {time, _} =
+      :timer.tc(fn ->
+        number_of_users =
+          (max / 2)
+          |> Kernel.trunc()

-    main_user
-    |> get_users(%{limit: number_of_users, local: :external})
-    |> run_stream(main_user)
+        main_user
+        |> get_users(%{limit: number_of_users, local: :local})
+        |> run_stream(main_user)
+
+        main_user
+        |> get_users(%{limit: number_of_users, local: :external})
+        |> run_stream(main_user)
+      end)
+
+    IO.puts("Making friends take #{to_sec(time)} sec.\n")
   end

   defp make_friends(%User{} = main_user, %User{} = user) do

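Both the old and the new generate_users/1 fan the per-user work out with Task.async_stream/3 and discard the results via Stream.run/1; the refactor mainly moves the timing and progress output into the function itself so callers no longer wrap it in :timer.tc. A self-contained sketch of that fan-out pattern follows; the worker function, concurrency limit, and counts are placeholders.

defmodule AsyncStreamSketch do
  @max_concurrency 10

  def run(max) do
    {time, _} =
      :timer.tc(fn ->
        1..max
        |> Task.async_stream(&fake_worker/1,
          max_concurrency: @max_concurrency,
          timeout: 30_000
        )
        |> Stream.run()
      end)

    IO.puts("Processed #{max} items in #{time / 1_000_000} sec.")
  end

  defp fake_worker(i) do
    # stands in for generate_user/1 or the friend-making work in the real benchmark
    Process.sleep(10)
    i
  end
end

AsyncStreamSketch.run(100)
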
@@ -1,9 +1,12 @@
 defmodule Mix.Tasks.Pleroma.Benchmarks.Tags do
   use Mix.Task
-  alias Pleroma.Repo
-  alias Pleroma.LoadTesting.Generator
+
+  import Pleroma.LoadTesting.Helper, only: [clean_tables: 0]
   import Ecto.Query
+
+  alias Pleroma.Repo
+  alias Pleroma.Web.MastodonAPI.TimelineController

   def run(_args) do
     Mix.Pleroma.start_pleroma()
     activities_count = Repo.aggregate(from(a in Pleroma.Activity), :count, :id)

@@ -11,8 +14,8 @@ def run(_args) do
     if activities_count == 0 do
       IO.puts("Did not find any activities, cleaning and generating")
       clean_tables()
-      Generator.generate_users(users_max: 10)
-      Generator.generate_tagged_activities()
+      Pleroma.LoadTesting.Users.generate_users(10)
+      Pleroma.LoadTesting.Activities.generate_tagged_activities()
     else
       IO.puts("Found #{activities_count} activities, won't generate new ones")
     end

@@ -34,7 +37,7 @@ def run(_args) do
     Benchee.run(
       %{
         "Hashtag fetching, any" => fn tags ->
-          Pleroma.Web.MastodonAPI.TimelineController.hashtag_fetching(
+          TimelineController.hashtag_fetching(
             %{
               "any" => tags
             },

@@ -44,7 +47,7 @@ def run(_args) do
         end,
         # Will always return zero results because no overlapping hashtags are generated.
         "Hashtag fetching, all" => fn tags ->
-          Pleroma.Web.MastodonAPI.TimelineController.hashtag_fetching(
+          TimelineController.hashtag_fetching(
             %{
               "all" => tags
             },

@@ -64,7 +67,7 @@ def run(_args) do
     Benchee.run(
       %{
         "Hashtag fetching" => fn tag ->
-          Pleroma.Web.MastodonAPI.TimelineController.hashtag_fetching(
+          TimelineController.hashtag_fetching(
             %{
               "tag" => tag
             },

@@ -77,11 +80,4 @@ def run(_args) do
       time: 5
     )
   end
-
-  defp clean_tables do
-    IO.puts("Deleting old data...\n")
-    Ecto.Adapters.SQL.query!(Repo, "TRUNCATE users CASCADE;")
-    Ecto.Adapters.SQL.query!(Repo, "TRUNCATE activities CASCADE;")
-    Ecto.Adapters.SQL.query!(Repo, "TRUNCATE objects CASCADE;")
-  end
 end

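The tags benchmark drives hashtag fetching through Benchee, passing the generated tag lists in as Benchee inputs and capping each job at time: 5 seconds. A minimal sketch of that Benchee.run/2 shape follows, assuming Benchee is available as in the task above; the job body and the inputs map are placeholders rather than the real TimelineController.hashtag_fetching calls.

Benchee.run(
  %{
    "Hashtag fetching, any" => fn tags ->
      # the real job calls TimelineController.hashtag_fetching with these tags
      Enum.each(tags, &String.length/1)
    end
  },
  inputs: %{
    "1 tag" => ["tag_0"],
    "5 tags" => ["tag_0", "tag_1", "tag_2", "tag_3", "tag_4"]
  },
  time: 5
)
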
@@ -1,8 +1,8 @@
 defmodule Mix.Tasks.Pleroma.LoadTesting do
   use Mix.Task
   import Ecto.Query
+  import Pleroma.LoadTesting.Helper, only: [clean_tables: 0]

-  alias Ecto.Adapters.SQL
   alias Pleroma.Repo
   alias Pleroma.User


@@ -63,12 +63,4 @@ def run(args) do

     Pleroma.LoadTesting.Fetcher.run_benchmarks(user)
   end
-
-  defp clean_tables do
-    IO.puts("Deleting old data...\n")
-    SQL.query!(Repo, "TRUNCATE users CASCADE;")
-    SQL.query!(Repo, "TRUNCATE activities CASCADE;")
-    SQL.query!(Repo, "TRUNCATE objects CASCADE;")
-    SQL.query!(Repo, "TRUNCATE oban_jobs CASCADE;")
-  end
 end

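With clean_tables/0 moved into Pleroma.LoadTesting.Helper, both mix tasks drop their local copies and pull the shared function in through import ... only:. A toy illustration of that import pattern follows; the module names here are hypothetical.

defmodule SharedHelper do
  def clean_tables, do: IO.puts("pretending to truncate tables")
end

defmodule SomeTask do
  # only: [clean_tables: 0] imports just the zero-arity function by name
  import SharedHelper, only: [clean_tables: 0]

  def run, do: clean_tables()
end

SomeTask.run()
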
@@ -5,6 +5,7 @@
 defmodule Mix.Pleroma do
   @doc "Common functions to be reused in mix tasks"
   def start_pleroma do
     Mix.Task.run("app.start")
+    Application.put_env(:phoenix, :serve_endpoints, false, persistent: true)

     if Pleroma.Config.get(:env) != :test do