diff --git a/config/benchmark.exs b/config/benchmark.exs
new file mode 100644
index 000000000..b4f5dbdbd
--- /dev/null
+++ b/config/benchmark.exs
@@ -0,0 +1,88 @@
+use Mix.Config
+
+# Run the endpoint server on a dedicated port during benchmarks
+# so it does not clash with a regular development instance.
+config :pleroma, Pleroma.Web.Endpoint,
+  http: [port: 4001],
+  url: [port: 4001],
+  server: true
+
+# Disable captcha for benchmarks
+config :pleroma, Pleroma.Captcha,
+  # It should not be enabled for automatic tests
+  enabled: false,
+  # A fake captcha service for tests
+  method: Pleroma.Captcha.Mock
+
+# Print only warnings and errors during benchmarks
+config :logger, level: :warn
+
+config :pleroma, :auth, oauth_consumer_strategies: []
+
+config :pleroma, Pleroma.Upload, filters: [], link_name: false
+
+config :pleroma, Pleroma.Uploaders.Local, uploads: "test/uploads"
+
+config :pleroma, Pleroma.Emails.Mailer, adapter: Swoosh.Adapters.Test, enabled: true
+
+config :pleroma, :instance,
+  email: "admin@example.com",
+  notify_email: "noreply@example.com",
+  skip_thread_containment: false,
+  federating: false
+
+config :pleroma, :activitypub, sign_object_fetches: false
+
+# Configure your database
+config :pleroma, Pleroma.Repo,
+  adapter: Ecto.Adapters.Postgres,
+  username: System.get_env("DB_USER") || "postgres",
+  database: System.get_env("DB_DATABASE") || "pleroma_test",
+  hostname: System.get_env("DB_HOST") || "localhost",
+  # username: "pleroma",
+  password:
+    System.get_env("DB_PASS") ||
+      "cAUrGezwXjRwd/lIPzZAcwjb/hiZiGi3FIaSGy9l/XsTcGA61FMy7eCBiRcg1DyQ",
+  # password: "",
+  pool_size: 10,
+  timeout: 180_000
+
+# Reduce hash rounds for testing
+config :pbkdf2_elixir, rounds: 1
+
+config :tesla, adapter: Tesla.Mock
+
+config :pleroma, :rich_media,
+  enabled: false,
+  ignore_hosts: [],
+  ignore_tld: ["local", "localdomain", "lan"]
+
+config :web_push_encryption, :vapid_details,
+  subject: "mailto:administrator@example.com",
+  public_key:
+    "BLH1qVhJItRGCfxgTtONfsOKDc9VRAraXw-3NsmjMngWSh7NxOizN6bkuRA7iLTMPS82PjwJAr3UoK9EC1IFrz4",
+  private_key: "_-XZ0iebPrRfZ_o0-IatTdszYa8VCH1yLN-JauK7HHA"
+
+config :web_push_encryption, :http_client, Pleroma.Web.WebPushHttpClientMock
+
+config :pleroma_job_queue, disabled: true
+
+config :pleroma, Pleroma.ScheduledActivity,
+  daily_user_limit: 2,
+  total_user_limit: 3,
+  enabled: false
+
+config :pleroma, :rate_limit,
+  search: [{1000, 30}, {1000, 30}],
+  app_account_creation: {10_000, 5},
+  password_reset: {1000, 30}
+
+config :pleroma, :http_security, report_uri: "https://endpoint.com"
+
+config :pleroma, :http, send_user_agent: false
+
+rum_enabled = System.get_env("RUM_ENABLED") == "true"
+config :pleroma, :database, rum_enabled: rum_enabled
+IO.puts("RUM enabled: #{rum_enabled}")
+
+config :pleroma, Pleroma.ReverseProxy.Client, Pleroma.ReverseProxy.ClientMock
diff --git a/lib/load_testing/fetcher.ex b/lib/load_testing/fetcher.ex
new file mode 100644
index 000000000..70c0fcd0c
--- /dev/null
+++ b/lib/load_testing/fetcher.ex
@@ -0,0 +1,116 @@
+defmodule Pleroma.LoadTesting.Fetcher do
+  use Pleroma.LoadTesting.Helper
+
+  def fetch_user(user) do
+    IO.puts("=================================")
+
+    {time, _value} = :timer.tc(fn -> Repo.get_by(User, id: user.id) end)
+
+    IO.puts("Query user by id: #{to_sec(time)} sec.")
+
+    {time, _value} =
+      :timer.tc(fn ->
+        Repo.get_by(User, ap_id: user.ap_id)
+      end)
+
+    IO.puts("Query user by ap_id: #{to_sec(time)} sec.")
+
+    {time, _value} =
+      :timer.tc(fn ->
+        Repo.get_by(User, email: user.email)
+      end)
+
#{to_sec(time)} sec.") + + {time, _value} = :timer.tc(fn -> Repo.get_by(User, nickname: user.nickname) end) + + IO.puts("Query user by nickname: #{to_sec(time)} sec.") + end + + def query_timelines(user) do + IO.puts("\n=================================") + + params = %{ + "count" => 20, + "with_muted" => true, + "type" => ["Create", "Announce"], + "blocking_user" => user, + "muting_user" => user, + "user" => user + } + + {time, _} = + :timer.tc(fn -> + ActivityPub.ActivityPub.fetch_activities([user.ap_id | user.following], params) + end) + + IO.puts("Query user home timeline: #{to_sec(time)} sec.") + + params = %{ + "count" => 20, + "local_only" => true, + "only_media" => "false", + "type" => ["Create", "Announce"], + "with_muted" => "true", + "blocking_user" => user, + "muting_user" => user + } + + {time, _} = + :timer.tc(fn -> + ActivityPub.ActivityPub.fetch_public_activities(params) + end) + + IO.puts("Query user mastodon public timeline: #{to_sec(time)} sec.") + + params = %{ + "count" => 20, + "only_media" => "false", + "type" => ["Create", "Announce"], + "with_muted" => "true", + "blocking_user" => user, + "muting_user" => user + } + + {time, _} = + :timer.tc(fn -> + ActivityPub.ActivityPub.fetch_public_activities(params) + end) + + IO.puts("Query user mastodon federated public timeline: #{to_sec(time)} sec.") + end + + def query_notifications(user) do + IO.puts("\n=================================") + params = %{"count" => "20", "with_muted" => "false"} + + {time, _} = + :timer.tc(fn -> Pleroma.Web.MastodonAPI.MastodonAPI.get_notifications(user, params) end) + + IO.puts("Query user notifications with out muted: #{to_sec(time)} sec.") + + params = %{"count" => "20", "with_muted" => "true"} + + {time, _} = + :timer.tc(fn -> Pleroma.Web.MastodonAPI.MastodonAPI.get_notifications(user, params) end) + + IO.puts("Query user notifications with muted: #{to_sec(time)} sec.") + end + + def query_long_thread(user, activity) do + IO.puts("\n=================================") + + {time, replies} = + :timer.tc(fn -> + Pleroma.Web.ActivityPub.ActivityPub.fetch_activities_for_context( + activity.data["context"], + %{ + "blocking_user" => user, + "user" => user + } + ) + end) + + IO.puts("Query long thread with #{length(replies)} replies: #{to_sec(time)} sec.") + end +end diff --git a/lib/load_testing/generator.ex b/lib/load_testing/generator.ex new file mode 100644 index 000000000..a9016b9e8 --- /dev/null +++ b/lib/load_testing/generator.ex @@ -0,0 +1,83 @@ +defmodule Pleroma.LoadTesting.Generator do + use Pleroma.LoadTesting.Helper + + def generate_users(opts) do + IO.puts("Starting generating #{opts[:users_max]} users...") + {time, _} = :timer.tc(fn -> do_generate_users(opts) end) + IO.puts("Inserting users take #{to_sec(time)} sec.\n") + end + + defp do_generate_users(opts) do + min = Keyword.get(opts, :users_min, 1) + max = Keyword.get(opts, :users_max) + + query = + "INSERT INTO \"users\" (\"ap_id\",\"bio\",\"email\",\"follower_address\",\"following\",\"following_address\",\"info\", + \"local\",\"name\",\"nickname\",\"password_hash\",\"tags\",\"id\",\"inserted_at\",\"updated_at\") VALUES \n" + + users = + Task.async_stream( + min..max, + &generate_user_data(&1), + max_concurrency: 10, + timeout: 30_000 + ) + |> Enum.reduce("", fn {:ok, data}, acc -> acc <> data <> ", \n" end) + + query = query <> String.replace_trailing(users, ", \n", ";") + + Ecto.Adapters.SQL.query!(Repo, query) + end + + defp generate_user_data(i) do + user = %User{ + name: "Test ใƒ†ใ‚นใƒˆ User #{i}", + email: 
"user#{i}@example.com", + nickname: "nick#{i}", + password_hash: Comeonin.Pbkdf2.hashpwsalt("test"), + bio: "Tester Number #{i}", + info: %{} + } + + user = %{ + user + | ap_id: User.ap_id(user), + follower_address: User.ap_followers(user), + following_address: User.ap_following(user), + following: [User.ap_id(user)] + } + + "('#{user.ap_id}', '#{user.bio}', '#{user.email}', '#{user.follower_address}', '{#{ + user.following + }}', '#{user.following_address}', '#{Jason.encode!(user.info)}', '#{user.local}', '#{ + user.name + }', '#{user.nickname}', '#{user.password_hash}', '{#{user.tags}}', uuid_generate_v4(), NOW(), NOW())" + end + + def generate_activities(users, opts) do + IO.puts("Starting generating #{opts[:activities_max]} activities...") + {time, _} = :timer.tc(fn -> do_generate_activities(users, opts) end) + IO.puts("Inserting activities take #{to_sec(time)} sec.\n") + end + + defp do_generate_activities(users, opts) do + Task.async_stream( + 1..opts[:activities_max], + fn _ -> + do_generate_activity(users, opts) + end, + max_concurrency: 10, + timeout: 30_000 + ) + |> Stream.run() + end + + defp do_generate_activity(users, opts) do + status = + if opts[:mention], + do: "some status with @#{opts[:mention].nickname}", + else: "some status" + + Pleroma.Web.CommonAPI.post(Enum.random(users), %{"status" => status}) + end +end diff --git a/lib/load_testing/helper.ex b/lib/load_testing/helper.ex new file mode 100644 index 000000000..338dba323 --- /dev/null +++ b/lib/load_testing/helper.ex @@ -0,0 +1,16 @@ +defmodule Pleroma.LoadTesting.Helper do + defmacro __using__(_) do + quote do + import Ecto.Query + alias Pleroma.Activity + alias Pleroma.Notification + alias Pleroma.Object + alias Pleroma.Repo + alias Pleroma.User + alias Pleroma.Web.ActivityPub + alias Pleroma.Web.CommonAPI + + defp to_sec(microseconds), do: microseconds / 1_000_000 + end + end +end diff --git a/lib/mix/tasks/pleroma/load_testing.ex b/lib/mix/tasks/pleroma/load_testing.ex new file mode 100644 index 000000000..9ed30db7e --- /dev/null +++ b/lib/mix/tasks/pleroma/load_testing.ex @@ -0,0 +1,100 @@ +defmodule Mix.Tasks.Pleroma.LoadTesting do + use Mix.Task + use Pleroma.LoadTesting.Helper + import Mix.Pleroma + import Pleroma.LoadTesting.Generator + import Pleroma.LoadTesting.Fetcher + + # tODO: remove autovacuum worker until generation is not ended + @shortdoc "Factory for generation data" + @moduledoc """ + Generates data like: + - users + - activities with notifications + + ## Generate data + MIX_ENV=test mix pleroma.load_testing --users 10000 --activities 20000 + MIX_ENV=test mix pleroma.load_testing -u 10000 -a 20000 + + Options: + - `--users NUMBER` - number of users to generate (default: 10000) + - `--activities NUMBER` - number of activities to generate (default: 20000) + """ + + @aliases [u: :users, a: :activities, d: :delete] + @switches [users: :integer, activities: :integer, delete: :boolean] + @users_default 20_000 + @activities_default 50_000 + + def run(args) do + {opts, _} = OptionParser.parse!(args, strict: @switches, aliases: @aliases) + start_pleroma() + + current_max = Keyword.get(opts, :users, @users_default) + activities_max = Keyword.get(opts, :activities, @activities_default) + + {users_min, users_max} = + if opts[:delete] do + clean_tables() + {1, current_max} + else + current_count = Repo.aggregate(from(u in User), :count, :id) + 1 + {current_count, current_max + current_count} + end + + opts = + Keyword.put(opts, :users_min, users_min) + |> Keyword.put(:users_max, users_max) + |> 
+      |> Keyword.put(:activities_max, activities_max)
+
+    generate_users(opts)
+
+    # Pick a random user to run the timed queries against
+    IO.puts("Fetching main user...")
+
+    {time, user} =
+      :timer.tc(fn -> Repo.one(from(u in User, order_by: fragment("RANDOM()"), limit: 1)) end)
+
+    IO.puts("Fetching main user took #{to_sec(time)} sec.\n")
+
+    IO.puts("Fetching users...")
+
+    {time, users} =
+      :timer.tc(fn ->
+        Repo.all(
+          from(u in User,
+            where: u.id != ^user.id,
+            order_by: fragment("RANDOM()"),
+            limit: 10
+          )
+        )
+      end)
+
+    IO.puts("Fetching users took #{to_sec(time)} sec.\n")
+
+    generate_activities(users, opts)
+
+    generate_activities(users, Keyword.put(opts, :mention, user))
+
+    # generate_replies(user, users, activities)
+
+    # activity = Enum.random(activities)
+    # generate_long_thread(user, users, activity)
+
+    IO.puts("Users in DB: #{Repo.aggregate(from(u in User), :count, :id)}")
+    IO.puts("Activities in DB: #{Repo.aggregate(from(a in Activity), :count, :id)}")
+    IO.puts("Objects in DB: #{Repo.aggregate(from(o in Object), :count, :id)}")
+    IO.puts("Notifications in DB: #{Repo.aggregate(from(n in Notification), :count, :id)}")
+
+    query_timelines(user)
+    query_notifications(user)
+    # query_long_thread(user, activity)
+  end
+
+  defp clean_tables do
+    IO.puts("\n\nDeleting old data...\n")
+    Ecto.Adapters.SQL.query!(Repo, "TRUNCATE users CASCADE;")
+    Ecto.Adapters.SQL.query!(Repo, "TRUNCATE activities CASCADE;")
+    Ecto.Adapters.SQL.query!(Repo, "TRUNCATE objects CASCADE;")
+  end
+end
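For small throwaway datasets the generator can also be driven directly from IEx instead of going through the Mix task. A minimal sketch, assuming an empty database (run the task with `--delete` first to truncate the relevant tables) and using placeholder counts:

    # Placeholder counts; the keys mirror the options built by the Mix task above
    opts = [users_min: 1, users_max: 50, activities_max: 100]

    Pleroma.LoadTesting.Generator.generate_users(opts)

    # Reuse the freshly inserted users as authors for the generated statuses
    users = Pleroma.Repo.all(Pleroma.User)
    Pleroma.LoadTesting.Generator.generate_activities(users, opts)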