Mirror of https://git.pleroma.social/pleroma/pleroma.git (synced 2025-03-12 22:52:41 +00:00)

Merge branch 'release/2.9.0' into 'stable'

Release/2.9.0

See merge request pleroma/pleroma!4331

Commit af6d5470d2: 99 changed files with 3133 additions and 805 deletions
CHANGELOG.md (+27 lines)
@@ -4,6 +4,33 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

+## 2.9.0
+
+### Security
+- Require HTTP signatures (if enabled) for routes used by both C2S and S2S AP API
+- Fix several spoofing vectors
+
+### Changed
+- Performance: Use 301 (permanent) redirect instead of 302 (temporary) when redirecting small images in media proxy. This allows browsers to cache the redirect response.
+
+### Added
+- Include "published" in actor view
+- Link to exported outbox/followers/following collections in backup actor.json
+- Hashtag following
+- Allow to specify post language
+
+### Fixed
+- Verify a local Update sent through AP C2S so users can only update their own objects
+- Fix Mastodon incoming edits with inlined "likes"
+- Allow incoming "Listen" activities
+- Fix missing check for domain presence in rich media ignore_host configuration
+- Fix Rich Media parsing of TwitterCards/OpenGraph to adhere to the spec and always choose the first image if multiple are provided.
+- Fix OpenGraph/TwitterCard meta tag ordering for posts with multiple attachments
+- Fix blurhash generation crashes
+
+### Removed
+- Retire MRFs DNSRBL, FODirectReply, and QuietReply
+
 ## 2.8.0

 ### Changed
New changelog.d entries (1 line each):

changelog.d/301-small-image-redirect.change: Performance: Use 301 (permanent) redirect instead of 302 (temporary) when redirecting small images in media proxy. This allows browsers to cache the redirect response.
changelog.d/actor-published-date.add: Include "published" in actor view
changelog.d/backup-links.add: Link to exported outbox/followers/following collections in backup actor.json
changelog.d/c2s-update-verify.fix: Verify a local Update sent through AP C2S so users can only update their own objects
changelog.d/ensure-authorized-fetch.security: Require HTTP signatures (if enabled) for routes used by both C2S and S2S AP API
changelog.d/fix-mastodon-edits.fix: Fix Mastodon incoming edits with inlined "likes"
changelog.d/follow-hashtags.add: Hashtag following
changelog.d/incoming-scrobbles.fix: Allow incoming "Listen" activities
changelog.d/post-languages.add: Allow to specify post language
changelog.d/retire_mrfs.remove: Retire MRFs DNSRBL, FODirectReply, and QuietReply
changelog.d/rich-media-ignore-host.fix: Fix missing check for domain presence in rich media ignore_host configuration
changelog.d/rich-media-twittercard.fix: Fix Rich Media parsing of TwitterCards/OpenGraph to adhere to the spec and always choose the first image if multiple are provided.
changelog.d/twittercard-tag-order.fix: Fix OpenGraph/TwitterCard meta tag ordering for posts with multiple attachments
changelog.d/vips-blurhash.fix: Fix blurhash generation crashes
@@ -150,7 +150,10 @@ config :mime, :types, %{
  "application/xrd+xml" => ["xrd+xml"],
  "application/jrd+json" => ["jrd+json"],
  "application/activity+json" => ["activity+json"],
- "application/ld+json" => ["activity+json"]
+ "application/ld+json" => ["activity+json"],
+ # Can be removed when bumping MIME past 2.0.5
+ # see https://akkoma.dev/AkkomaGang/akkoma/issues/657
+ "image/apng" => ["apng"]
}

config :tesla, adapter: Tesla.Adapter.Hackney
@@ -359,7 +362,8 @@ config :pleroma, :activitypub,
  follow_handshake_timeout: 500,
  note_replies_output_limit: 5,
  sign_object_fetches: true,
- authorized_fetch_mode: false
+ authorized_fetch_mode: false,
+ client_api_enabled: false

config :pleroma, :streamer,
  workers: 3,
@@ -413,11 +417,6 @@ config :pleroma, :mrf_vocabulary,
  accept: [],
  reject: []

- config :pleroma, :mrf_dnsrbl,
-   nameserver: "127.0.0.1",
-   port: 53,
-   zone: "bl.pleroma.com"
-
# threshold of 7 days
config :pleroma, :mrf_object_age,
  threshold: 604_800,
@@ -1772,6 +1772,11 @@ config :pleroma, :config_description, [
        type: :integer,
        description: "Following handshake timeout",
        suggestions: [500]
      },
+     %{
+       key: :client_api_enabled,
+       type: :boolean,
+       description: "Allow client to server ActivityPub interactions"
+     }
    ]
  },
@@ -3302,8 +3307,7 @@ config :pleroma, :config_description, [
        suggestions: [
          Pleroma.Web.Preload.Providers.Instance,
          Pleroma.Web.Preload.Providers.User,
-         Pleroma.Web.Preload.Providers.Timelines,
-         Pleroma.Web.Preload.Providers.StatusNet
+         Pleroma.Web.Preload.Providers.Timelines
        ]
      }
    ]
@@ -38,7 +38,10 @@ config :pleroma, :instance,
  external_user_synchronization: false,
  static_dir: "test/instance_static/"

- config :pleroma, :activitypub, sign_object_fetches: false, follow_handshake_timeout: 0
+ config :pleroma, :activitypub,
+   sign_object_fetches: false,
+   follow_handshake_timeout: 0,
+   client_api_enabled: true

# Configure your database
config :pleroma, Pleroma.Repo,
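The test environment switches the new `client_api_enabled` flag on; it defaults to `false` in the base config above. A minimal sketch of enabling AP client-to-server support on a real instance (assuming the conventional `config/prod.secret.exs` override file) would be:

    # config/prod.secret.exs (hypothetical override; the key itself is defined in the base config above)
    config :pleroma, :activitypub,
      client_api_enabled: true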
@@ -98,7 +98,7 @@ To add configuration to your config file, you can copy it from the base config.
 * `moderator_privileges`: A list of privileges a moderator has (e.g. delete messages, manage reports...)
   * Possible values are the same as for `admin_privileges`

-## :database
+## :features
 * `improved_hashtag_timeline`: Setting to force toggle / force disable improved hashtags timeline. `:enabled` forces hashtags to be fetched from `hashtags` table for hashtags timeline. `:disabled` forces object-embedded hashtags to be used (slower). Keep it `:auto` for automatic behaviour (it is auto-set to `:enabled` [unless overridden] when HashtagsTableMigrator completes).

 ## Background migrations
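The corrected heading reflects that `improved_hashtag_timeline` is documented under the `:features` group; assuming the `config :pleroma, :features` key this section describes, forcing the improved hashtag timeline on would look roughly like:

    # sketch; valid values per the cheatsheet are :auto, :enabled and :disabled
    config :pleroma, :features,
      improved_hashtag_timeline: :enabled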
@@ -93,6 +93,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do
      )

      files = fetch_and_decode!(files_loc)
+     files_to_unzip = for({_, f} <- files, do: f)

      IO.puts(IO.ANSI.format(["Unpacking ", :bright, pack_name]))

@@ -103,17 +104,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do
        pack_name
      ])

-     files_to_unzip =
-       Enum.map(
-         files,
-         fn {_, f} -> to_charlist(f) end
-       )
-
-     {:ok, _} =
-       :zip.unzip(binary_archive,
-         cwd: String.to_charlist(pack_path),
-         file_list: files_to_unzip
-       )
+     {:ok, _} = Pleroma.SafeZip.unzip_data(binary_archive, pack_path, files_to_unzip)

      IO.puts(IO.ANSI.format(["Writing pack.json for ", :bright, pack_name]))

@@ -201,7 +192,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do

      tmp_pack_dir = Path.join(System.tmp_dir!(), "emoji-pack-#{name}")

-     {:ok, _} = :zip.unzip(binary_archive, cwd: String.to_charlist(tmp_pack_dir))
+     {:ok, _} = Pleroma.SafeZip.unzip_data(binary_archive, tmp_pack_dir)

      emoji_map = Pleroma.Emoji.Loader.make_shortcode_to_file_map(tmp_pack_dir, exts)

@@ -20,7 +20,8 @@ defmodule Pleroma.Constants do
      "deleted_activity_id",
      "pleroma_internal",
      "generator",
-     "rules"
+     "rules",
+     "language"
    ]
  )

@@ -36,10 +37,12 @@ defmodule Pleroma.Constants do
      "updated",
      "emoji",
      "content",
+     "contentMap",
      "summary",
      "sensitive",
      "attachment",
-     "generator"
+     "generator",
+     "language"
    ]
  )

@@ -100,7 +103,8 @@ defmodule Pleroma.Constants do
      "Announce",
      "Undo",
      "Flag",
-     "EmojiReact"
+     "EmojiReact",
+     "Listen"
    ]
  )

@@ -0,0 +1,49 @@ (new file)
# Pleroma: A lightweight social networking server
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.ContentLanguageMap do
  use Ecto.Type

  import Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode,
    only: [good_locale_code?: 1]

  def type, do: :map

  def cast(%{} = object) do
    with {status, %{} = data} when status in [:modified, :ok] <- validate_map(object) do
      {:ok, data}
    else
      {_, nil} -> {:ok, nil}
      {:error, _} -> :error
    end
  end

  def cast(_), do: :error

  def dump(data), do: {:ok, data}

  def load(data), do: {:ok, data}

  defp validate_map(%{} = object) do
    {status, data} =
      object
      |> Enum.reduce({:ok, %{}}, fn
        {lang, value}, {status, acc} when is_binary(lang) and is_binary(value) ->
          if good_locale_code?(lang) do
            {status, Map.put(acc, lang, value)}
          else
            {:modified, acc}
          end

        _, {_status, acc} ->
          {:modified, acc}
      end)

    if data == %{} do
      {status, nil}
    else
      {status, data}
    end
  end
end
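As a usage sketch (values chosen for illustration), `cast/1` keeps only string-keyed, string-valued entries whose key passes `good_locale_code?/1` and collapses an empty result to `nil`:

    alias Pleroma.EctoType.ActivityPub.ObjectValidators.ContentLanguageMap

    {:ok, %{"en" => "Hello"}} = ContentLanguageMap.cast(%{"en" => "Hello", "bad lang" => "x"})
    {:ok, nil} = ContentLanguageMap.cast(%{"no good keys" => "x"})
    :error = ContentLanguageMap.cast("not a map")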
@@ -0,0 +1,27 @@ (new file)
# Pleroma: A lightweight social networking server
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode do
  use Ecto.Type

  def type, do: :string

  def cast(language) when is_binary(language) do
    if good_locale_code?(language) do
      {:ok, language}
    else
      {:error, :invalid_language}
    end
  end

  def cast(_), do: :error

  def dump(data), do: {:ok, data}

  def load(data), do: {:ok, data}

  def good_locale_code?(code) when is_binary(code), do: code =~ ~r<^[a-zA-Z0-9\-]+\z$>

  def good_locale_code?(_code), do: false
end
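A few illustrative results for the validator above (the regex accepts only ASCII letters, digits and hyphens):

    alias Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode

    true = LanguageCode.good_locale_code?("en-US")
    false = LanguageCode.good_locale_code?("en_US")
    {:ok, "uk"} = LanguageCode.cast("uk")
    {:error, :invalid_language} = LanguageCode.cast("en US")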
@@ -24,12 +24,13 @@ defmodule Pleroma.Emoji.Pack do

  alias Pleroma.Emoji
  alias Pleroma.Emoji.Pack
+ alias Pleroma.SafeZip
  alias Pleroma.Utils

  @spec create(String.t()) :: {:ok, t()} | {:error, File.posix()} | {:error, :empty_values}
  def create(name) do
    with :ok <- validate_not_empty([name]),
-        dir <- Path.join(emoji_path(), name),
+        dir <- path_join_name_safe(emoji_path(), name),
         :ok <- File.mkdir(dir) do
      save_pack(%__MODULE__{pack_file: Path.join(dir, "pack.json")})
    end
@@ -65,43 +66,21 @@ defmodule Pleroma.Emoji.Pack do
          {:ok, [binary()]} | {:error, File.posix(), binary()} | {:error, :empty_values}
  def delete(name) do
    with :ok <- validate_not_empty([name]),
-        pack_path <- Path.join(emoji_path(), name) do
+        pack_path <- path_join_name_safe(emoji_path(), name) do
      File.rm_rf(pack_path)
    end
  end

- @spec unpack_zip_emojies(list(tuple())) :: list(map())
- defp unpack_zip_emojies(zip_files) do
-   Enum.reduce(zip_files, [], fn
-     {_, path, s, _, _, _}, acc when elem(s, 2) == :regular ->
-       with(
-         filename <- Path.basename(path),
-         shortcode <- Path.basename(filename, Path.extname(filename)),
-         false <- Emoji.exist?(shortcode)
-       ) do
-         [%{path: path, filename: path, shortcode: shortcode} | acc]
-       else
-         _ -> acc
-       end
-
-     _, acc ->
-       acc
-   end)
- end
-
  @spec add_file(t(), String.t(), Path.t(), Plug.Upload.t()) ::
          {:ok, t()}
          | {:error, File.posix() | atom()}
  def add_file(%Pack{} = pack, _, _, %Plug.Upload{content_type: "application/zip"} = file) do
-   with {:ok, zip_files} <- :zip.table(to_charlist(file.path)),
-        [_ | _] = emojies <- unpack_zip_emojies(zip_files),
+   with {:ok, zip_files} <- SafeZip.list_dir_file(file.path),
+        [_ | _] = emojies <- map_zip_emojies(zip_files),
         {:ok, tmp_dir} <- Utils.tmp_dir("emoji") do
      try do
-       {:ok, _emoji_files} =
-         :zip.unzip(
-           to_charlist(file.path),
-           [{:file_list, Enum.map(emojies, & &1[:path])}, {:cwd, String.to_charlist(tmp_dir)}]
-         )
+       SafeZip.unzip_file(file.path, tmp_dir, Enum.map(emojies, & &1[:path]))

        {_, updated_pack} =
          Enum.map_reduce(emojies, pack, fn item, emoji_pack ->
@@ -292,7 +271,7 @@ defmodule Pleroma.Emoji.Pack do
  @spec load_pack(String.t()) :: {:ok, t()} | {:error, :file.posix()}
  def load_pack(name) do
    name = Path.basename(name)
-   pack_file = Path.join([emoji_path(), name, "pack.json"])
+   pack_file = path_join_name_safe(emoji_path(), name) |> Path.join("pack.json")

    with {:ok, _} <- File.stat(pack_file),
         {:ok, pack_data} <- File.read(pack_file) do
@@ -416,10 +395,9 @@ defmodule Pleroma.Emoji.Pack do
  end

  defp create_archive_and_cache(pack, hash) do
-   files = [~c"pack.json" | Enum.map(pack.files, fn {_, file} -> to_charlist(file) end)]
-
-   {:ok, {_, result}} =
-     :zip.zip(~c"#{pack.name}.zip", files, [:memory, cwd: to_charlist(pack.path)])
+   pack_file_list = Enum.into(pack.files, [], fn {_, f} -> f end)
+   files = ["pack.json" | pack_file_list]
+   {:ok, {_, result}} = SafeZip.zip("#{pack.name}.zip", files, pack.path, true)

    ttl_per_file = Pleroma.Config.get!([:emoji, :shared_pack_cache_seconds_per_file])
    overall_ttl = :timer.seconds(ttl_per_file * Enum.count(files))
@@ -478,7 +456,7 @@ defmodule Pleroma.Emoji.Pack do
  end

  defp save_file(%Plug.Upload{path: upload_path}, pack, filename) do
-   file_path = Path.join(pack.path, filename)
+   file_path = path_join_safe(pack.path, filename)
    create_subdirs(file_path)

    with {:ok, _} <- File.copy(upload_path, file_path) do
@@ -497,8 +475,8 @@ defmodule Pleroma.Emoji.Pack do
  end

  defp rename_file(pack, filename, new_filename) do
-   old_path = Path.join(pack.path, filename)
-   new_path = Path.join(pack.path, new_filename)
+   old_path = path_join_safe(pack.path, filename)
+   new_path = path_join_safe(pack.path, new_filename)
    create_subdirs(new_path)

    with :ok <- File.rename(old_path, new_path) do
@@ -516,7 +494,7 @@ defmodule Pleroma.Emoji.Pack do

  defp remove_file(pack, shortcode) do
    with {:ok, filename} <- get_filename(pack, shortcode),
-        emoji <- Path.join(pack.path, filename),
+        emoji <- path_join_safe(pack.path, filename),
         :ok <- File.rm(emoji) do
      remove_dir_if_empty(emoji, filename)
    end
@@ -534,7 +512,7 @@ defmodule Pleroma.Emoji.Pack do

  defp get_filename(pack, shortcode) do
    with %{^shortcode => filename} when is_binary(filename) <- pack.files,
-        file_path <- Path.join(pack.path, filename),
+        file_path <- path_join_safe(pack.path, filename),
         {:ok, _} <- File.stat(file_path) do
      {:ok, filename}
    else
@@ -584,11 +562,10 @@ defmodule Pleroma.Emoji.Pack do

  defp unzip(archive, pack_info, remote_pack, local_pack) do
    with :ok <- File.mkdir_p!(local_pack.path) do
-     files = Enum.map(remote_pack["files"], fn {_, path} -> to_charlist(path) end)
+     files = Enum.map(remote_pack["files"], fn {_, path} -> path end)
      # Fallback cannot contain a pack.json file
-     files = if pack_info[:fallback], do: files, else: [~c"pack.json" | files]
-
-     :zip.unzip(archive, cwd: to_charlist(local_pack.path), file_list: files)
+     files = if pack_info[:fallback], do: files, else: ["pack.json" | files]
+     SafeZip.unzip_data(archive, local_pack.path, files)
    end
  end

@@ -649,13 +626,43 @@ defmodule Pleroma.Emoji.Pack do
  end

  defp validate_has_all_files(pack, zip) do
-   with {:ok, f_list} <- :zip.unzip(zip, [:memory]) do
-     # Check if all files from the pack.json are in the archive
-     pack.files
-     |> Enum.all?(fn {_, from_manifest} ->
-       List.keyfind(f_list, to_charlist(from_manifest), 0)
-     end)
-     |> if(do: :ok, else: {:error, :incomplete})
-   end
+   # Check if all files from the pack.json are in the archive
+   eset =
+     Enum.reduce(pack.files, MapSet.new(), fn
+       {_, file}, s -> MapSet.put(s, to_charlist(file))
+     end)
+
+   if SafeZip.contains_all_data?(zip, eset),
+     do: :ok,
+     else: {:error, :incomplete}
  end
+
+ defp path_join_name_safe(dir, name) do
+   if to_string(name) != Path.basename(name) or name in ["..", ".", ""] do
+     raise "Invalid or malicious pack name: #{name}"
+   else
+     Path.join(dir, name)
+   end
+ end
+
+ defp path_join_safe(dir, path) do
+   {:ok, safe_path} = Path.safe_relative(path)
+   Path.join(dir, safe_path)
+ end
+
+ defp map_zip_emojies(zip_files) do
+   Enum.reduce(zip_files, [], fn path, acc ->
+     with(
+       filename <- Path.basename(path),
+       shortcode <- Path.basename(filename, Path.extname(filename)),
+       # note: this only checks the shortcode, if an emoji already exists on the same path, but
+       # with a different shortcode, the existing one will be degraded to an alias of the new
+       false <- Emoji.exist?(shortcode)
+     ) do
+       [%{path: path, filename: path, shortcode: shortcode} | acc]
+     else
+       _ -> acc
+     end
+   end)
+ end
end
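Both helpers added above lean on `Path.safe_relative/1` (Elixir 1.14+) and `Path.basename/1` to keep pack file operations inside the pack directory; roughly, the standard-library behaviour they rely on is:

    {:ok, "blobs/cat.png"} = Path.safe_relative("blobs/cat.png")
    :error = Path.safe_relative("../../etc/passwd")

    # path_join_name_safe/2 additionally rejects names that are not plain basenames
    "evil" = Path.basename("../../evil")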
@@ -65,24 +65,12 @@ defmodule Pleroma.Frontend do
  end

  def unzip(zip, dest) do
-   with {:ok, unzipped} <- :zip.unzip(zip, [:memory]) do
-     File.rm_rf!(dest)
-     File.mkdir_p!(dest)
+   File.rm_rf!(dest)
+   File.mkdir_p!(dest)

-     Enum.each(unzipped, fn {filename, data} ->
-       path = filename
-
-       new_file_path = Path.join(dest, path)
-
-       path
-       |> Path.dirname()
-       |> then(&Path.join(dest, &1))
-       |> File.mkdir_p!()
-
-       if not File.dir?(new_file_path) do
-         File.write!(new_file_path, data)
-       end
-     end)
+   case Pleroma.SafeZip.unzip_data(zip, dest) do
+     {:ok, _} -> :ok
+     error -> error
    end
  end

@@ -12,6 +12,7 @@ defmodule Pleroma.Hashtag do
  alias Pleroma.Hashtag
  alias Pleroma.Object
  alias Pleroma.Repo
+ alias Pleroma.User.HashtagFollow

  schema "hashtags" do
    field(:name, :string)
@@ -27,6 +28,14 @@ defmodule Pleroma.Hashtag do
    |> String.trim()
  end

+ def get_by_id(id) do
+   Repo.get(Hashtag, id)
+ end
+
+ def get_by_name(name) do
+   Repo.get_by(Hashtag, name: normalize_name(name))
+ end
+
  def get_or_create_by_name(name) do
    changeset = changeset(%Hashtag{}, %{name: name})

@@ -103,4 +112,22 @@ defmodule Pleroma.Hashtag do
      {:ok, deleted_count}
    end
  end
+
+ def get_followers(%Hashtag{id: hashtag_id}) do
+   from(hf in HashtagFollow)
+   |> where([hf], hf.hashtag_id == ^hashtag_id)
+   |> join(:inner, [hf], u in assoc(hf, :user))
+   |> select([hf, u], u.id)
+   |> Repo.all()
+ end
+
+ def get_recipients_for_activity(%Pleroma.Activity{object: %{hashtags: tags}})
+     when is_list(tags) do
+   tags
+   |> Enum.map(&get_followers/1)
+   |> List.flatten()
+   |> Enum.uniq()
+ end
+
+ def get_recipients_for_activity(_activity), do: []
end
@@ -89,9 +89,9 @@ defmodule Pleroma.Pagination do

  defp cast_params(params) do
    param_types = %{
-     min_id: :string,
-     since_id: :string,
-     max_id: :string,
+     min_id: params[:id_type] || :string,
+     since_id: params[:id_type] || :string,
+     max_id: params[:id_type] || :string,
      offset: :integer,
      limit: :integer,
      skip_extra_order: :boolean,
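The change lets callers override the Ecto type used for the pagination cursor fields. A sketch of the underlying mechanism (not the exact Pleroma call site), using a schemaless changeset the way `cast_params/1` does, with a hypothetical `:id_type` of `:integer`:

    types = %{max_id: :integer, limit: :integer}
    changeset = Ecto.Changeset.cast({%{}, types}, %{max_id: "42", limit: "20"}, Map.keys(types))
    %{max_id: 42, limit: 20} = changeset.changes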
lib/pleroma/safe_zip.ex (new file, 216 lines)
@@ -0,0 +1,216 @@
# Akkoma: Magically expressive social media
# Copyright © 2024 Akkoma Authors <https://akkoma.dev/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.SafeZip do
  @moduledoc """
  Wraps the subset of Erlang's zip module we’d like to use
  but enforces path-traversal safety everywhere and other checks.

  For convenience almost all functions accept both elixir strings and charlists,
  but output elixir strings themselves. However, this means the input parameter type
  can no longer be used to distinguish archive file paths from archive binary data in memory,
  thus where needed both a _data and _file variant are provided.
  """

  @type text() :: String.t() | [char()]

  defp safe_path?(path) do
    # Path accepts elixir’s chardata()
    case Path.safe_relative(path) do
      {:ok, _} -> true
      _ -> false
    end
  end

  defp safe_type?(file_type) do
    if file_type in [:regular, :directory] do
      true
    else
      false
    end
  end

  defp maybe_add_file(_type, _path_charlist, nil), do: nil

  defp maybe_add_file(:regular, path_charlist, file_list),
    do: [to_string(path_charlist) | file_list]

  defp maybe_add_file(_type, _path_charlist, file_list), do: file_list

  @spec check_safe_archive_and_maybe_list_files(binary() | [char()], [term()], boolean()) ::
          {:ok, [String.t()]} | {:error, reason :: term()}
  defp check_safe_archive_and_maybe_list_files(archive, opts, list) do
    acc = if list, do: [], else: nil

    with {:ok, table} <- :zip.table(archive, opts) do
      Enum.reduce_while(table, {:ok, acc}, fn
        # ZIP comment
        {:zip_comment, _}, acc ->
          {:cont, acc}

        # File entry
        {:zip_file, path, info, _comment, _offset, _comp_size}, {:ok, fl} ->
          with {_, type} <- {:get_type, elem(info, 2)},
               {_, true} <- {:type, safe_type?(type)},
               {_, true} <- {:safe_path, safe_path?(path)} do
            {:cont, {:ok, maybe_add_file(type, path, fl)}}
          else
            {:get_type, e} ->
              {:halt,
               {:error, "Couldn't determine file type of ZIP entry at #{path} (#{inspect(e)})"}}

            {:type, _} ->
              {:halt, {:error, "Potentially unsafe file type in ZIP at: #{path}"}}

            {:safe_path, _} ->
              {:halt, {:error, "Unsafe path in ZIP: #{path}"}}
          end

        # new OTP version?
        _, _acc ->
          {:halt, {:error, "Unknown ZIP record type"}}
      end)
    end
  end

  @spec check_safe_archive_and_list_files(binary() | [char()], [term()]) ::
          {:ok, [String.t()]} | {:error, reason :: term()}
  defp check_safe_archive_and_list_files(archive, opts \\ []) do
    check_safe_archive_and_maybe_list_files(archive, opts, true)
  end

  @spec check_safe_archive(binary() | [char()], [term()]) :: :ok | {:error, reason :: term()}
  defp check_safe_archive(archive, opts \\ []) do
    case check_safe_archive_and_maybe_list_files(archive, opts, false) do
      {:ok, _} -> :ok
      error -> error
    end
  end

  @spec check_safe_file_list([text()], text()) :: :ok | {:error, term()}
  defp check_safe_file_list([], _), do: :ok

  defp check_safe_file_list([path | tail], cwd) do
    with {_, true} <- {:path, safe_path?(path)},
         {_, {:ok, fstat}} <- {:stat, File.stat(Path.expand(path, cwd))},
         {_, true} <- {:type, safe_type?(fstat.type)} do
      check_safe_file_list(tail, cwd)
    else
      {:path, _} ->
        {:error, "Unsafe path escaping cwd: #{path}"}

      {:stat, e} ->
        {:error, "Unable to check file type of #{path}: #{inspect(e)}"}

      {:type, _} ->
        {:error, "Unsafe type at #{path}"}
    end
  end

  defp check_safe_file_list(_, _), do: {:error, "Malformed file_list"}

  @doc """
  Checks whether the archive data contais file entries for all paths from fset

  Note this really only accepts entries corresponding to regular _files_,
  if a path is contained as for example an directory, this does not count as a match.
  """
  @spec contains_all_data?(binary(), MapSet.t()) :: true | false
  def contains_all_data?(archive_data, fset) do
    with {:ok, table} <- :zip.table(archive_data) do
      remaining =
        Enum.reduce(table, fset, fn
          {:zip_file, path, info, _comment, _offset, _comp_size}, fset ->
            if elem(info, 2) == :regular do
              MapSet.delete(fset, path)
            else
              fset
            end

          _, _ ->
            fset
        end)
        |> MapSet.size()

      if remaining == 0, do: true, else: false
    else
      _ -> false
    end
  end

  @doc """
  List all file entries in ZIP, or error if invalid or unsafe.

  Note this really only lists regular files, no directories, ZIP comments or other types!
  """
  @spec list_dir_file(text()) :: {:ok, [String.t()]} | {:error, reason :: term()}
  def list_dir_file(archive) do
    path = to_charlist(archive)
    check_safe_archive_and_list_files(path)
  end

  defp stringify_zip({:ok, {fname, data}}), do: {:ok, {to_string(fname), data}}
  defp stringify_zip({:ok, fname}), do: {:ok, to_string(fname)}
  defp stringify_zip(ret), do: ret

  @spec zip(text(), text(), [text()], boolean()) ::
          {:ok, file_name :: String.t()}
          | {:ok, {file_name :: String.t(), file_data :: binary()}}
          | {:error, reason :: term()}
  def zip(name, file_list, cwd, memory \\ false) do
    opts = [{:cwd, to_charlist(cwd)}]
    opts = if memory, do: [:memory | opts], else: opts

    with :ok <- check_safe_file_list(file_list, cwd) do
      file_list = for f <- file_list, do: to_charlist(f)
      name = to_charlist(name)
      stringify_zip(:zip.zip(name, file_list, opts))
    end
  end

  @spec unzip_file(text(), text(), [text()] | nil) ::
          {:ok, [String.t()]}
          | {:error, reason :: term()}
          | {:error, {name :: text(), reason :: term()}}
  def unzip_file(archive, target_dir, file_list \\ nil) do
    do_unzip(to_charlist(archive), to_charlist(target_dir), file_list)
  end

  @spec unzip_data(binary(), text(), [text()] | nil) ::
          {:ok, [String.t()]}
          | {:error, reason :: term()}
          | {:error, {name :: text(), reason :: term()}}
  def unzip_data(archive, target_dir, file_list \\ nil) do
    do_unzip(archive, to_charlist(target_dir), file_list)
  end

  defp stringify_unzip({:ok, [{_fname, _data} | _] = filebinlist}),
    do: {:ok, Enum.map(filebinlist, fn {fname, data} -> {to_string(fname), data} end)}

  defp stringify_unzip({:ok, [_fname | _] = filelist}),
    do: {:ok, Enum.map(filelist, fn fname -> to_string(fname) end)}

  defp stringify_unzip({:error, {fname, term}}), do: {:error, {to_string(fname), term}}
  defp stringify_unzip(ret), do: ret

  @spec do_unzip(binary() | [char()], text(), [text()] | nil) ::
          {:ok, [String.t()]}
          | {:error, reason :: term()}
          | {:error, {name :: text(), reason :: term()}}
  defp do_unzip(archive, target_dir, file_list) do
    opts =
      if file_list != nil do
        [
          file_list: for(f <- file_list, do: to_charlist(f)),
          cwd: target_dir
        ]
      else
        [cwd: target_dir]
      end

    with :ok <- check_safe_archive(archive) do
      stringify_unzip(:zip.unzip(archive, opts))
    end
  end
end
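A short usage sketch of the new module (file names are hypothetical): both the file- and data-based variants refuse archives containing unsafe paths or non-regular entries before handing off to Erlang's `:zip`:

    # listing only returns regular files, and fails on unsafe archives
    {:ok, files} = Pleroma.SafeZip.list_dir_file("emoji_pack.zip")

    # extraction is constrained to the target directory
    {:ok, _paths} = Pleroma.SafeZip.unzip_file("emoji_pack.zip", "/tmp/emoji", files)

    # archive data already in memory goes through the _data variant
    {:ok, _paths} = Pleroma.SafeZip.unzip_data(File.read!("emoji_pack.zip"), "/tmp/emoji")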
@@ -90,9 +90,13 @@ defmodule Pleroma.Upload.Filter.AnalyzeMetadata do
      {:ok, rgb} =
        if Image.has_alpha?(resized_image) do
          # remove alpha channel
-         resized_image
-         |> Operation.extract_band!(0, n: 3)
-         |> Image.write_to_binary()
+         case Operation.extract_band(resized_image, 0, n: 3) do
+           {:ok, data} ->
+             Image.write_to_binary(data)
+
+           _ ->
+             Image.write_to_binary(resized_image)
+         end
        else
          Image.write_to_binary(resized_image)
        end
@@ -19,6 +19,7 @@ defmodule Pleroma.User do
  alias Pleroma.Emoji
  alias Pleroma.FollowingRelationship
  alias Pleroma.Formatter
+ alias Pleroma.Hashtag
  alias Pleroma.HTML
  alias Pleroma.Keys
  alias Pleroma.MFA
@@ -27,6 +28,7 @@ defmodule Pleroma.User do
  alias Pleroma.Registration
  alias Pleroma.Repo
  alias Pleroma.User
+ alias Pleroma.User.HashtagFollow
  alias Pleroma.UserRelationship
  alias Pleroma.Web.ActivityPub.ActivityPub
  alias Pleroma.Web.ActivityPub.Builder
@@ -174,6 +176,12 @@ defmodule Pleroma.User do
    has_many(:outgoing_relationships, UserRelationship, foreign_key: :source_id)
    has_many(:incoming_relationships, UserRelationship, foreign_key: :target_id)

+   many_to_many(:followed_hashtags, Hashtag,
+     on_replace: :delete,
+     on_delete: :delete_all,
+     join_through: HashtagFollow
+   )
+
    for {relationship_type,
         [
           {outgoing_relation, outgoing_relation_target},
@@ -2861,4 +2869,54 @@ defmodule Pleroma.User do
      birthday_month: month
    })
  end
+
+ defp maybe_load_followed_hashtags(%User{followed_hashtags: follows} = user)
+      when is_list(follows),
+      do: user
+
+ defp maybe_load_followed_hashtags(%User{} = user) do
+   followed_hashtags = HashtagFollow.get_by_user(user)
+   %{user | followed_hashtags: followed_hashtags}
+ end
+
+ def followed_hashtags(%User{followed_hashtags: follows})
+     when is_list(follows),
+     do: follows
+
+ def followed_hashtags(%User{} = user) do
+   {:ok, user} =
+     user
+     |> maybe_load_followed_hashtags()
+     |> set_cache()
+
+   user.followed_hashtags
+ end
+
+ def follow_hashtag(%User{} = user, %Hashtag{} = hashtag) do
+   Logger.debug("Follow hashtag #{hashtag.name} for user #{user.nickname}")
+   user = maybe_load_followed_hashtags(user)
+
+   with {:ok, _} <- HashtagFollow.new(user, hashtag),
+        follows <- HashtagFollow.get_by_user(user),
+        %User{} = user <- user |> Map.put(:followed_hashtags, follows) do
+     user
+     |> set_cache()
+   end
+ end
+
+ def unfollow_hashtag(%User{} = user, %Hashtag{} = hashtag) do
+   Logger.debug("Unfollow hashtag #{hashtag.name} for user #{user.nickname}")
+   user = maybe_load_followed_hashtags(user)
+
+   with {:ok, _} <- HashtagFollow.delete(user, hashtag),
+        follows <- HashtagFollow.get_by_user(user),
+        %User{} = user <- user |> Map.put(:followed_hashtags, follows) do
+     user
+     |> set_cache()
+   end
+ end
+
+ def following_hashtag?(%User{} = user, %Hashtag{} = hashtag) do
+   not is_nil(HashtagFollow.get(user, hashtag))
+ end
end
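Taken together with `Pleroma.User.HashtagFollow` further below, a hypothetical IEx session for the new hashtag-following flow looks roughly like this (nickname and tag chosen for illustration):

    user = Pleroma.User.get_by_nickname("lain")
    {:ok, tag} = Pleroma.Hashtag.get_or_create_by_name("elixir")

    {:ok, user} = Pleroma.User.follow_hashtag(user, tag)
    true = Pleroma.User.following_hashtag?(user, tag)
    [%Pleroma.Hashtag{name: "elixir"}] = Pleroma.User.followed_hashtags(user)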
@@ -16,6 +16,7 @@ defmodule Pleroma.User.Backup do
  alias Pleroma.Bookmark
  alias Pleroma.Config
  alias Pleroma.Repo
+ alias Pleroma.SafeZip
  alias Pleroma.Uploaders.Uploader
  alias Pleroma.User
  alias Pleroma.Web.ActivityPub.ActivityPub
@@ -179,12 +180,12 @@ defmodule Pleroma.User.Backup do
  end

  @files [
-   ~c"actor.json",
-   ~c"outbox.json",
-   ~c"likes.json",
-   ~c"bookmarks.json",
-   ~c"followers.json",
-   ~c"following.json"
+   "actor.json",
+   "outbox.json",
+   "likes.json",
+   "bookmarks.json",
+   "followers.json",
+   "following.json"
  ]

  @spec run(t()) :: {:ok, t()} | {:error, :failed}
@@ -200,7 +201,7 @@ defmodule Pleroma.User.Backup do
         {_, :ok} <- {:followers, followers(backup.tempdir, backup.user)},
         {_, :ok} <- {:following, following(backup.tempdir, backup.user)},
         {_, {:ok, _zip_path}} <-
-          {:zip, :zip.create(to_charlist(tempfile), @files, cwd: to_charlist(backup.tempdir))},
+          {:zip, SafeZip.zip(tempfile, @files, backup.tempdir)},
         {_, {:ok, %File.Stat{size: zip_size}}} <- {:filestat, File.stat(tempfile)},
         {:ok, updated_backup} <- update_record(backup, %{file_size: zip_size}) do
      {:ok, updated_backup}
@@ -246,7 +247,13 @@ defmodule Pleroma.User.Backup do
  defp actor(dir, user) do
    with {:ok, json} <-
           UserView.render("user.json", %{user: user})
-          |> Map.merge(%{"likes" => "likes.json", "bookmarks" => "bookmarks.json"})
+          |> Map.merge(%{
+            "bookmarks" => "bookmarks.json",
+            "likes" => "likes.json",
+            "outbox" => "outbox.json",
+            "followers" => "followers.json",
+            "following" => "following.json"
+          })
           |> Jason.encode() do
      File.write(Path.join(dir, "actor.json"), json)
    end
lib/pleroma/user/hashtag_follow.ex (new file, 55 lines)
@@ -0,0 +1,55 @@
defmodule Pleroma.User.HashtagFollow do
  use Ecto.Schema
  import Ecto.Query
  import Ecto.Changeset

  alias Pleroma.Hashtag
  alias Pleroma.Repo
  alias Pleroma.User

  schema "user_follows_hashtag" do
    belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
    belongs_to(:hashtag, Hashtag)
  end

  def changeset(%__MODULE__{} = user_hashtag_follow, attrs) do
    user_hashtag_follow
    |> cast(attrs, [:user_id, :hashtag_id])
    |> unique_constraint(:hashtag_id,
      name: :user_hashtag_follows_user_id_hashtag_id_index,
      message: "already following"
    )
    |> validate_required([:user_id, :hashtag_id])
  end

  def new(%User{} = user, %Hashtag{} = hashtag) do
    %__MODULE__{}
    |> changeset(%{user_id: user.id, hashtag_id: hashtag.id})
    |> Repo.insert(on_conflict: :nothing)
  end

  def delete(%User{} = user, %Hashtag{} = hashtag) do
    with %__MODULE__{} = user_hashtag_follow <- get(user, hashtag) do
      Repo.delete(user_hashtag_follow)
    else
      _ -> {:ok, nil}
    end
  end

  def get(%User{} = user, %Hashtag{} = hashtag) do
    from(hf in __MODULE__)
    |> where([hf], hf.user_id == ^user.id and hf.hashtag_id == ^hashtag.id)
    |> Repo.one()
  end

  def get_by_user(%User{} = user) do
    user
    |> followed_hashtags_query()
    |> Repo.all()
  end

  def followed_hashtags_query(%User{} = user) do
    Ecto.assoc(user, :followed_hashtags)
    |> Ecto.Query.order_by([h], desc: h.id)
  end
end
@@ -924,6 +924,31 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
    )
  end

+ # Essentially, either look for activities addressed to `recipients`, _OR_ ones
+ # that reference a hashtag that the user follows
+ # Firstly, two fallbacks in case there's no hashtag constraint, or the user doesn't
+ # follow any
+ defp restrict_recipients_or_hashtags(query, recipients, user, nil) do
+   restrict_recipients(query, recipients, user)
+ end
+
+ defp restrict_recipients_or_hashtags(query, recipients, user, []) do
+   restrict_recipients(query, recipients, user)
+ end
+
+ defp restrict_recipients_or_hashtags(query, recipients, _user, hashtag_ids) do
+   from([activity, object] in query)
+   |> join(:left, [activity, object], hto in "hashtags_objects",
+     on: hto.object_id == object.id,
+     as: :hto
+   )
+   |> where(
+     [activity, object, hto: hto],
+     (hto.hashtag_id in ^hashtag_ids and ^Constants.as_public() in activity.recipients) or
+       fragment("? && ?", ^recipients, activity.recipients)
+   )
+ end
+
  defp restrict_local(query, %{local_only: true}) do
    from(activity in query, where: activity.local == true)
  end

@@ -1414,7 +1439,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
    |> maybe_preload_report_notes(opts)
    |> maybe_set_thread_muted_field(opts)
    |> maybe_order(opts)
-   |> restrict_recipients(recipients, opts[:user])
+   |> restrict_recipients_or_hashtags(recipients, opts[:user], opts[:followed_hashtags])
    |> restrict_replies(opts)
    |> restrict_since(opts)
    |> restrict_local(opts)
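The `:followed_hashtags` option is expected to carry the IDs of hashtags the viewing user follows. A plausible way a timeline query would be built on top of the helpers above (the exact call site is not shown in this diff, so treat this as a sketch) is:

    followed = user |> Pleroma.User.followed_hashtags() |> Enum.map(& &1.id)

    opts = %{user: user, followed_hashtags: followed, type: ["Create", "Announce"]}
    activities = Pleroma.Web.ActivityPub.ActivityPub.fetch_activities(recipients, opts)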
@@ -1,146 +0,0 @@ (file deleted)
# Pleroma: A lightweight social networking server
# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy do
  @moduledoc """
  Dynamic activity filtering based on an RBL database

  This MRF makes queries to a custom DNS server which will
  respond with values indicating the classification of the domain
  the activity originated from. This method has been widely used
  in the email anti-spam industry for very fast reputation checks.

  e.g., if the DNS response is 127.0.0.1 or empty, the domain is OK
  Other values such as 127.0.0.2 may be used for specific classifications.

  Information for why the host is blocked can be stored in a corresponding TXT record.

  This method is fail-open so if the queries fail the activites are accepted.

  An example of software meant for this purpsoe is rbldnsd which can be found
  at http://www.corpit.ru/mjt/rbldnsd.html or mirrored at
  https://git.pleroma.social/feld/rbldnsd

  It is highly recommended that you run your own copy of rbldnsd and use an
  external mechanism to sync/share the contents of the zone file. This is
  important to keep the latency on the queries as low as possible and prevent
  your DNS server from being attacked so it fails and content is permitted.
  """

  @behaviour Pleroma.Web.ActivityPub.MRF.Policy

  alias Pleroma.Config

  require Logger

  @query_retries 1
  @query_timeout 500

  @impl true
  def filter(%{"actor" => actor} = activity) do
    actor_info = URI.parse(actor)

    with {:ok, activity} <- check_rbl(actor_info, activity) do
      {:ok, activity}
    else
      _ -> {:reject, "[DNSRBLPolicy]"}
    end
  end

  @impl true
  def filter(activity), do: {:ok, activity}

  @impl true
  def describe do
    mrf_dnsrbl =
      Config.get(:mrf_dnsrbl)
      |> Enum.into(%{})

    {:ok, %{mrf_dnsrbl: mrf_dnsrbl}}
  end

  @impl true
  def config_description do
    %{
      key: :mrf_dnsrbl,
      related_policy: "Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy",
      label: "MRF DNSRBL",
      description: "DNS RealTime Blackhole Policy",
      children: [
        %{
          key: :nameserver,
          type: {:string},
          description: "DNSRBL Nameserver to Query (IP or hostame)",
          suggestions: ["127.0.0.1"]
        },
        %{
          key: :port,
          type: {:string},
          description: "Nameserver port",
          suggestions: ["53"]
        },
        %{
          key: :zone,
          type: {:string},
          description: "Root zone for querying",
          suggestions: ["bl.pleroma.com"]
        }
      ]
    }
  end

  defp check_rbl(%{host: actor_host}, activity) do
    with false <- match?(^actor_host, Pleroma.Web.Endpoint.host()),
         zone when not is_nil(zone) <- Keyword.get(Config.get([:mrf_dnsrbl]), :zone) do
      query =
        Enum.join([actor_host, zone], ".")
        |> String.to_charlist()

      rbl_response = rblquery(query)

      if Enum.empty?(rbl_response) do
        {:ok, activity}
      else
        Task.start(fn ->
          reason =
            case rblquery(query, :txt) do
              [[result]] -> result
              _ -> "undefined"
            end

          Logger.warning(
            "DNSRBL Rejected activity from #{actor_host} for reason: #{inspect(reason)}"
          )
        end)

        :error
      end
    else
      _ -> {:ok, activity}
    end
  end

  defp get_rblhost_ip(rblhost) do
    case rblhost |> String.to_charlist() |> :inet_parse.address() do
      {:ok, _} -> rblhost |> String.to_charlist() |> :inet_parse.address()
      _ -> {:ok, rblhost |> String.to_charlist() |> :inet_res.lookup(:in, :a) |> Enum.random()}
    end
  end

  defp rblquery(query, type \\ :a) do
    config = Config.get([:mrf_dnsrbl])

    case get_rblhost_ip(config[:nameserver]) do
      {:ok, rblnsip} ->
        :inet_res.lookup(query, :in, type,
          nameservers: [{rblnsip, config[:port]}],
          timeout: @query_timeout,
          retry: @query_retries
        )

      _ ->
        []
    end
  end
end
@@ -1,53 +0,0 @@ (file deleted)
# Pleroma: A lightweight social networking server
# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.ActivityPub.MRF.FODirectReply do
  @moduledoc """
  FODirectReply alters the scope of replies to activities which are Followers Only to be Direct. The purpose of this policy is to prevent broken threads for followers of the reply author because their response was to a user that they are not also following.
  """

  alias Pleroma.Object
  alias Pleroma.User
  alias Pleroma.Web.ActivityPub.Visibility

  @behaviour Pleroma.Web.ActivityPub.MRF.Policy

  @impl true
  def filter(
        %{
          "type" => "Create",
          "to" => to,
          "object" => %{
            "actor" => actor,
            "type" => "Note",
            "inReplyTo" => in_reply_to
          }
        } = activity
      ) do
    with true <- is_binary(in_reply_to),
         %User{follower_address: followers_collection, local: true} <- User.get_by_ap_id(actor),
         %Object{} = in_reply_to_object <- Object.get_by_ap_id(in_reply_to),
         "private" <- Visibility.get_visibility(in_reply_to_object) do
      direct_to = to -- [followers_collection]

      updated_activity =
        activity
        |> Map.put("cc", [])
        |> Map.put("to", direct_to)
        |> Map.put("directMessage", true)
        |> put_in(["object", "cc"], [])
        |> put_in(["object", "to"], direct_to)

      {:ok, updated_activity}
    else
      _ -> {:ok, activity}
    end
  end

  @impl true
  def filter(activity), do: {:ok, activity}

  @impl true
  def describe, do: {:ok, %{}}
end
@@ -1,60 +0,0 @@ (file deleted)
# Pleroma: A lightweight social networking server
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.ActivityPub.MRF.QuietReply do
  @moduledoc """
  QuietReply alters the scope of activities from local users when replying by enforcing them to be "Unlisted" or "Quiet Public". This delivers the activity to all the expected recipients and instances, but it will not be published in the Federated / The Whole Known Network timelines. It will still be published to the Home timelines of the user's followers and visible to anyone who opens the thread.
  """
  require Pleroma.Constants

  alias Pleroma.User

  @behaviour Pleroma.Web.ActivityPub.MRF.Policy

  @impl true
  def history_awareness, do: :auto

  @impl true
  def filter(
        %{
          "type" => "Create",
          "to" => to,
          "cc" => cc,
          "object" => %{
            "actor" => actor,
            "type" => "Note",
            "inReplyTo" => in_reply_to
          }
        } = activity
      ) do
    with true <- is_binary(in_reply_to),
         false <- match?([], cc),
         %User{follower_address: followers_collection, local: true} <-
           User.get_by_ap_id(actor) do
      updated_to =
        to
        |> Kernel.++([followers_collection])
        |> Kernel.--([Pleroma.Constants.as_public()])

      updated_cc = [Pleroma.Constants.as_public()]

      updated_activity =
        activity
        |> Map.put("to", updated_to)
        |> Map.put("cc", updated_cc)
        |> put_in(["object", "to"], updated_to)
        |> put_in(["object", "cc"], updated_cc)

      {:ok, updated_activity}
    else
      _ -> {:ok, activity}
    end
  end

  @impl true
  def filter(activity), do: {:ok, activity}

  @impl true
  def describe, do: {:ok, %{}}
end
@@ -26,6 +26,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do
  alias Pleroma.Web.ActivityPub.ObjectValidators.AudioImageVideoValidator
  alias Pleroma.Web.ActivityPub.ObjectValidators.BlockValidator
  alias Pleroma.Web.ActivityPub.ObjectValidators.ChatMessageValidator
+ alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes
  alias Pleroma.Web.ActivityPub.ObjectValidators.CreateChatMessageValidator
  alias Pleroma.Web.ActivityPub.ObjectValidators.CreateGenericValidator
  alias Pleroma.Web.ActivityPub.ObjectValidators.DeleteValidator
@@ -115,7 +116,10 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do
        meta
      )
      when objtype in ~w[Question Answer Audio Video Image Event Article Note Page] do
-   with {:ok, object_data} <- cast_and_apply_and_stringify_with_history(object),
+   with {:ok, object_data} <-
+          object
+          |> CommonFixes.maybe_add_language_from_activity(create_activity)
+          |> cast_and_apply_and_stringify_with_history(),
         meta = Keyword.put(meta, :object_data, object_data),
         {:ok, create_activity} <-
           create_activity
@@ -165,7 +169,11 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do
      )
      when objtype in ~w[Question Answer Audio Video Event Article Note Page] do
    with {_, false} <- {:local, Access.get(meta, :local, false)},
-        {_, {:ok, object_data, _}} <- {:object_validation, validate(object, meta)},
+        {_, {:ok, object_data, _}} <-
+          {:object_validation,
+           object
+           |> CommonFixes.maybe_add_language_from_activity(update_activity)
+           |> validate(meta)},
         meta = Keyword.put(meta, :object_data, object_data),
         {:ok, update_activity} <-
           update_activity
@@ -30,7 +30,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.ArticleNotePageValidator do

  def cast_and_apply(data) do
    data
-   |> cast_data
+   |> cast_data()
    |> apply_action(:insert)
  end

@@ -85,8 +85,11 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.ArticleNotePageValidator do
    |> fix_replies()
    |> fix_attachments()
    |> CommonFixes.fix_quote_url()
+   |> CommonFixes.fix_likes()
    |> Transmogrifier.fix_emoji()
    |> Transmogrifier.fix_content_map()
+   |> CommonFixes.maybe_add_language()
+   |> CommonFixes.maybe_add_content_map()
  end

  def changeset(struct, data) do
@@ -100,6 +100,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AudioImageVideoValidator do
    |> CommonFixes.fix_actor()
    |> CommonFixes.fix_object_defaults()
    |> CommonFixes.fix_quote_url()
+   |> CommonFixes.fix_likes()
    |> Transmogrifier.fix_emoji()
    |> fix_url()
    |> fix_content()
@@ -31,6 +31,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFields do
  defmacro object_fields do
    quote bind_quoted: binding() do
      field(:content, :string)
+     field(:contentMap, ObjectValidators.ContentLanguageMap)

      field(:published, ObjectValidators.DateTime)
      field(:updated, ObjectValidators.DateTime)
@@ -58,6 +59,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFields do
      field(:like_count, :integer, default: 0)
      field(:announcement_count, :integer, default: 0)
      field(:quotes_count, :integer, default: 0)
+     field(:language, ObjectValidators.LanguageCode)
      field(:inReplyTo, ObjectValidators.ObjectID)
      field(:quoteUrl, ObjectValidators.ObjectID)
      field(:url, ObjectValidators.BareUri)
@@ -11,6 +11,11 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes do
  alias Pleroma.Web.ActivityPub.Transmogrifier
  alias Pleroma.Web.ActivityPub.Utils

+ import Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode,
+   only: [good_locale_code?: 1]
+
+ import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1]
+
  require Pleroma.Constants

  def cast_and_filter_recipients(message, field, follower_collection, field_fallback \\ []) do
@@ -114,6 +119,13 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes do

  def fix_quote_url(data), do: data

+ # On Mastodon, `"likes"` attribute includes an inlined `Collection` with `totalItems`,
+ # not a list of users.
+ # https://github.com/mastodon/mastodon/pull/32007
+ def fix_likes(%{"likes" => %{}} = data), do: Map.drop(data, ["likes"])
+
+ def fix_likes(data), do: data
+
  # https://codeberg.org/fediverse/fep/src/branch/main/fep/e232/fep-e232.md
  def object_link_tag?(%{
        "type" => "Link",
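In practice this means a Mastodon-style inlined likes collection is simply dropped before validation, while a Pleroma-style list of actor IDs is left untouched; illustratively:

    alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes

    %{"id" => "https://example.com/1"} =
      CommonFixes.fix_likes(%{"id" => "https://example.com/1", "likes" => %{"totalItems" => 3}})

    %{"likes" => []} = CommonFixes.fix_likes(%{"likes" => []})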
@@ -125,4 +137,60 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes do
  end

  def object_link_tag?(_), do: false
+
+ def maybe_add_language_from_activity(object, activity) do
+   language = get_language_from_context(activity)
+
+   if language do
+     Map.put(object, "language", language)
+   else
+     object
+   end
+ end
+
+ def maybe_add_language(object) do
+   language =
+     [
+       get_language_from_context(object),
+       get_language_from_content_map(object)
+     ]
+     |> Enum.find(&good_locale_code?(&1))
+
+   if language do
+     Map.put(object, "language", language)
+   else
+     object
+   end
+ end
+
+ defp get_language_from_context(%{"@context" => context}) when is_list(context) do
+   case context
+        |> Enum.find(fn
+          %{"@language" => language} -> language != "und"
+          _ -> nil
+        end) do
+     %{"@language" => language} -> language
+     _ -> nil
+   end
+ end
+
+ defp get_language_from_context(_), do: nil
+
+ defp get_language_from_content_map(%{"contentMap" => content_map, "content" => source_content}) do
+   content_groups = Map.to_list(content_map)
+
+   case Enum.find(content_groups, fn {_, content} -> content == source_content end) do
+     {language, _} -> language
+     _ -> nil
+   end
+ end
+
+ defp get_language_from_content_map(_), do: nil
+
+ def maybe_add_content_map(%{"language" => language, "content" => content} = object)
+     when not_empty_string(language) do
+   Map.put(object, "contentMap", Map.put(%{}, language, content))
+ end
+
+ def maybe_add_content_map(object), do: object
end
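A small sketch of how the two additions compose during object fixing (input values chosen for illustration): a valid JSON-LD `@language` hint becomes the object's `language`, which is then mirrored into a single-entry `contentMap`:

    alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes

    object = %{"content" => "Cześć", "@context" => ["https://www.w3.org/ns/activitystreams", %{"@language" => "pl"}]}

    object
    |> CommonFixes.maybe_add_language()
    |> CommonFixes.maybe_add_content_map()
    # => adds "language" => "pl" and "contentMap" => %{"pl" => "Cześć"}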
@@ -28,7 +28,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.EventValidator do

  def cast_and_apply(data) do
    data
-   |> cast_data
+   |> cast_data()
    |> apply_action(:insert)
  end

@@ -38,6 +38,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.EventValidator do
    |> validate_data()
  end

+ @spec cast_data(map()) :: map()
  def cast_data(data) do
    %__MODULE__{}
    |> changeset(data)
@@ -47,7 +48,10 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.EventValidator do
    data
    |> CommonFixes.fix_actor()
    |> CommonFixes.fix_object_defaults()
+   |> CommonFixes.fix_likes()
    |> Transmogrifier.fix_emoji()
+   |> CommonFixes.maybe_add_language()
+   |> CommonFixes.maybe_add_content_map()
  end

  def changeset(struct, data) do
@@ -64,6 +64,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.QuestionValidator do
    |> CommonFixes.fix_actor()
    |> CommonFixes.fix_object_defaults()
    |> CommonFixes.fix_quote_url()
+   |> CommonFixes.fix_likes()
    |> Transmogrifier.fix_emoji()
    |> fix_closed()
  end
@@ -16,12 +16,14 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
  alias Pleroma.Web.ActivityPub.ActivityPub
  alias Pleroma.Web.ActivityPub.Builder
  alias Pleroma.Web.ActivityPub.ObjectValidator
+ alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes
  alias Pleroma.Web.ActivityPub.Pipeline
  alias Pleroma.Web.ActivityPub.Utils
  alias Pleroma.Web.ActivityPub.Visibility
  alias Pleroma.Web.Federator

  import Ecto.Query
+ import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1]

  require Pleroma.Constants

@@ -41,6 +43,38 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
    |> fix_content_map()
    |> fix_addressing()
    |> fix_summary()
+   |> fix_history(&fix_object/1)
  end

+ defp maybe_fix_object(%{"attributedTo" => _} = object), do: fix_object(object)
+ defp maybe_fix_object(object), do: object
+
+ defp fix_history(%{"formerRepresentations" => %{"orderedItems" => list}} = obj, fix_fun)
+      when is_list(list) do
+   update_in(obj["formerRepresentations"]["orderedItems"], fn h -> Enum.map(h, fix_fun) end)
+ end
+
+ defp fix_history(obj, _), do: obj
+
+ defp fix_recursive(obj, fun) do
+   # unlike Erlang, Elixir does not support recursive inline functions
+   # which would allow us to avoid reconstructing this on every recursion
+   rec_fun = fn
+     obj when is_map(obj) -> fix_recursive(obj, fun)
+     # there may be simple AP IDs in history (or object field)
+     obj -> obj
+   end
+
+   obj
+   |> fun.()
+   |> fix_history(rec_fun)
+   |> then(fn
+     %{"object" => object} = doc when is_map(object) ->
+       update_in(doc["object"], rec_fun)
+
+     apdoc ->
+       apdoc
+   end)
+ end
+
  def fix_summary(%{"summary" => nil} = object) do
@@ -166,7 +200,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do

  def fix_quote_url_and_maybe_fetch(object, options \\ []) do
    quote_url =
-     case Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes.fix_quote_url(object) do
+     case CommonFixes.fix_quote_url(object) do
        %{"quoteUrl" => quote_url} -> quote_url
        _ -> nil
      end
@@ -336,6 +370,9 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do

  def fix_tag(object), do: object

+ # prefer content over contentMap
+ def fix_content_map(%{"content" => content} = object) when not_empty_string(content), do: object
+
  # content map usually only has one language so this will do for now.
  def fix_content_map(%{"contentMap" => content_map} = object) do
    content_groups = Map.to_list(content_map)
@ -370,11 +407,18 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
end)
|
||||
end
|
||||
|
||||
def handle_incoming(data, options \\ [])
|
||||
def handle_incoming(data, options \\ []) do
|
||||
data
|
||||
|> fix_recursive(&strip_internal_fields/1)
|
||||
|> handle_incoming_normalized(options)
|
||||
end
|
||||
|
||||
# Flag objects are placed ahead of the ID check because Mastodon 2.8 and earlier send them
|
||||
# with nil ID.
|
||||
def handle_incoming(%{"type" => "Flag", "object" => objects, "actor" => actor} = data, _options) do
|
||||
defp handle_incoming_normalized(
|
||||
%{"type" => "Flag", "object" => objects, "actor" => actor} = data,
|
||||
_options
|
||||
) do
|
||||
with context <- data["context"] || Utils.generate_context_id(),
|
||||
content <- data["content"] || "",
|
||||
%User{} = actor <- User.get_cached_by_ap_id(actor),
|
||||
|
@ -395,16 +439,17 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
end
|
||||
|
||||
# disallow objects with bogus IDs
|
||||
def handle_incoming(%{"id" => nil}, _options), do: :error
|
||||
def handle_incoming(%{"id" => ""}, _options), do: :error
|
||||
defp handle_incoming_normalized(%{"id" => nil}, _options), do: :error
|
||||
defp handle_incoming_normalized(%{"id" => ""}, _options), do: :error
|
||||
# length of https:// = 8, should validate better, but good enough for now.
|
||||
def handle_incoming(%{"id" => id}, _options) when is_binary(id) and byte_size(id) < 8,
|
||||
do: :error
|
||||
defp handle_incoming_normalized(%{"id" => id}, _options)
|
||||
when is_binary(id) and byte_size(id) < 8,
|
||||
do: :error
|
||||
|
||||
def handle_incoming(
|
||||
%{"type" => "Listen", "object" => %{"type" => "Audio"} = object} = data,
|
||||
options
|
||||
) do
|
||||
defp handle_incoming_normalized(
|
||||
%{"type" => "Listen", "object" => %{"type" => "Audio"} = object} = data,
|
||||
options
|
||||
) do
|
||||
actor = Containment.get_actor(data)
|
||||
|
||||
data =
|
||||
|
@ -446,25 +491,25 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
"star" => "⭐"
|
||||
}
|
||||
|
||||
@doc "Rewrite misskey likes into EmojiReacts"
|
||||
def handle_incoming(
|
||||
%{
|
||||
"type" => "Like",
|
||||
"_misskey_reaction" => reaction
|
||||
} = data,
|
||||
options
|
||||
) do
|
||||
# Rewrite misskey likes into EmojiReacts
|
||||
defp handle_incoming_normalized(
|
||||
%{
|
||||
"type" => "Like",
|
||||
"_misskey_reaction" => reaction
|
||||
} = data,
|
||||
options
|
||||
) do
|
||||
data
|
||||
|> Map.put("type", "EmojiReact")
|
||||
|> Map.put("content", @misskey_reactions[reaction] || reaction)
|
||||
|> handle_incoming(options)
|
||||
|> handle_incoming_normalized(options)
|
||||
end
|
||||
|
||||
def handle_incoming(
|
||||
%{"type" => "Create", "object" => %{"type" => objtype, "id" => obj_id}} = data,
|
||||
options
|
||||
)
|
||||
when objtype in ~w{Question Answer ChatMessage Audio Video Event Article Note Page Image} do
|
||||
defp handle_incoming_normalized(
|
||||
%{"type" => "Create", "object" => %{"type" => objtype, "id" => obj_id}} = data,
|
||||
options
|
||||
)
|
||||
when objtype in ~w{Question Answer ChatMessage Audio Video Event Article Note Page Image} do
|
||||
fetch_options = Keyword.put(options, :depth, (options[:depth] || 0) + 1)
|
||||
|
||||
object =
|
||||
|
@ -487,8 +532,8 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
end
|
||||
end
|
||||
|
||||
def handle_incoming(%{"type" => type} = data, _options)
|
||||
when type in ~w{Like EmojiReact Announce Add Remove} do
|
||||
defp handle_incoming_normalized(%{"type" => type} = data, _options)
|
||||
when type in ~w{Like EmojiReact Announce Add Remove} do
|
||||
with :ok <- ObjectValidator.fetch_actor_and_object(data),
|
||||
{:ok, activity, _meta} <-
|
||||
Pipeline.common_pipeline(data, local: false) do
|
||||
|
@ -498,11 +543,14 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
end
|
||||
end
|
||||
|
||||
def handle_incoming(
|
||||
%{"type" => type} = data,
|
||||
_options
|
||||
)
|
||||
when type in ~w{Update Block Follow Accept Reject} do
|
||||
defp handle_incoming_normalized(
|
||||
%{"type" => type} = data,
|
||||
_options
|
||||
)
|
||||
when type in ~w{Update Block Follow Accept Reject} do
|
||||
fixed_obj = maybe_fix_object(data["object"])
|
||||
data = if fixed_obj != nil, do: %{data | "object" => fixed_obj}, else: data
|
||||
|
||||
with {:ok, %User{}} <- ObjectValidator.fetch_actor(data),
|
||||
{:ok, activity, _} <-
|
||||
Pipeline.common_pipeline(data, local: false) do
|
||||
|
@ -510,10 +558,10 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
end
|
||||
end
|
||||
|
||||
def handle_incoming(
|
||||
%{"type" => "Delete"} = data,
|
||||
_options
|
||||
) do
|
||||
defp handle_incoming_normalized(
|
||||
%{"type" => "Delete"} = data,
|
||||
_options
|
||||
) do
|
||||
with {:ok, activity, _} <-
|
||||
Pipeline.common_pipeline(data, local: false) do
|
||||
{:ok, activity}
|
||||
|
@ -536,15 +584,15 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
end
|
||||
end
|
||||
|
||||
def handle_incoming(
|
||||
%{
|
||||
"type" => "Undo",
|
||||
"object" => %{"type" => "Follow", "object" => followed},
|
||||
"actor" => follower,
|
||||
"id" => id
|
||||
} = _data,
|
||||
_options
|
||||
) do
|
||||
defp handle_incoming_normalized(
|
||||
%{
|
||||
"type" => "Undo",
|
||||
"object" => %{"type" => "Follow", "object" => followed},
|
||||
"actor" => follower,
|
||||
"id" => id
|
||||
} = _data,
|
||||
_options
|
||||
) do
|
||||
with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
|
||||
{:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower),
|
||||
{:ok, activity} <- ActivityPub.unfollow(follower, followed, id, false) do
|
||||
|
@ -555,46 +603,46 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
end
|
||||
end
|
||||
|
||||
def handle_incoming(
|
||||
%{
|
||||
"type" => "Undo",
|
||||
"object" => %{"type" => type}
|
||||
} = data,
|
||||
_options
|
||||
)
|
||||
when type in ["Like", "EmojiReact", "Announce", "Block"] do
|
||||
defp handle_incoming_normalized(
|
||||
%{
|
||||
"type" => "Undo",
|
||||
"object" => %{"type" => type}
|
||||
} = data,
|
||||
_options
|
||||
)
|
||||
when type in ["Like", "EmojiReact", "Announce", "Block"] do
|
||||
with {:ok, activity, _} <- Pipeline.common_pipeline(data, local: false) do
|
||||
{:ok, activity}
|
||||
end
|
||||
end
|
||||
|
||||
# For Undos that don't have the complete object attached, try to find it in our database.
|
||||
def handle_incoming(
|
||||
%{
|
||||
"type" => "Undo",
|
||||
"object" => object
|
||||
} = activity,
|
||||
options
|
||||
)
|
||||
when is_binary(object) do
|
||||
defp handle_incoming_normalized(
|
||||
%{
|
||||
"type" => "Undo",
|
||||
"object" => object
|
||||
} = activity,
|
||||
options
|
||||
)
|
||||
when is_binary(object) do
|
||||
with %Activity{data: data} <- Activity.get_by_ap_id(object) do
|
||||
activity
|
||||
|> Map.put("object", data)
|
||||
|> handle_incoming(options)
|
||||
|> handle_incoming_normalized(options)
|
||||
else
|
||||
_e -> :error
|
||||
end
|
||||
end
|
||||
|
||||
def handle_incoming(
|
||||
%{
|
||||
"type" => "Move",
|
||||
"actor" => origin_actor,
|
||||
"object" => origin_actor,
|
||||
"target" => target_actor
|
||||
},
|
||||
_options
|
||||
) do
|
||||
defp handle_incoming_normalized(
|
||||
%{
|
||||
"type" => "Move",
|
||||
"actor" => origin_actor,
|
||||
"object" => origin_actor,
|
||||
"target" => target_actor
|
||||
},
|
||||
_options
|
||||
) do
|
||||
with %User{} = origin_user <- User.get_cached_by_ap_id(origin_actor),
|
||||
{:ok, %User{} = target_user} <- User.get_or_fetch_by_ap_id(target_actor),
|
||||
true <- origin_actor in target_user.also_known_as do
|
||||
|
@ -604,7 +652,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
end
|
||||
end
|
||||
|
||||
def handle_incoming(_, _), do: :error
|
||||
defp handle_incoming_normalized(_, _), do: :error
|
||||
|
||||
@spec get_obj_helper(String.t(), Keyword.t()) :: {:ok, Object.t()} | nil
|
||||
def get_obj_helper(id, options \\ []) do
|
||||
|
@ -716,6 +764,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
|> set_reply_to_uri
|
||||
|> set_quote_url
|
||||
|> set_replies
|
||||
|> CommonFixes.maybe_add_content_map()
|
||||
|> strip_internal_fields
|
||||
|> strip_internal_tags
|
||||
|> set_type
|
||||
|
@ -750,12 +799,11 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
object_id
|
||||
|> Object.normalize(fetch: false)
|
||||
|> Map.get(:data)
|
||||
|> prepare_object
|
||||
|
||||
data =
|
||||
data
|
||||
|> Map.put("object", object)
|
||||
|> Map.merge(Utils.make_json_ld_header())
|
||||
|> Map.put("object", prepare_object(object))
|
||||
|> Map.merge(Utils.make_json_ld_header(object))
|
||||
|> Map.delete("bcc")
|
||||
|
||||
{:ok, data}
|
||||
|
@ -763,14 +811,10 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
|
||||
def prepare_outgoing(%{"type" => "Update", "object" => %{"type" => objtype} = object} = data)
|
||||
when objtype in Pleroma.Constants.updatable_object_types() do
|
||||
object =
|
||||
object
|
||||
|> prepare_object
|
||||
|
||||
data =
|
||||
data
|
||||
|> Map.put("object", object)
|
||||
|> Map.merge(Utils.make_json_ld_header())
|
||||
|> Map.put("object", prepare_object(object))
|
||||
|> Map.merge(Utils.make_json_ld_header(object))
|
||||
|> Map.delete("bcc")
|
||||
|
||||
{:ok, data}
|
||||
|
@ -840,7 +884,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
data
|
||||
|> strip_internal_fields
|
||||
|> maybe_fix_object_url
|
||||
|> Map.merge(Utils.make_json_ld_header())
|
||||
|> Map.merge(Utils.make_json_ld_header(data))
|
||||
|
||||
{:ok, data}
|
||||
end
|
||||
|
|
|
@ -20,6 +20,7 @@ defmodule Pleroma.Web.ActivityPub.Utils do
|
|||
alias Pleroma.Web.Router.Helpers
|
||||
|
||||
import Ecto.Query
|
||||
import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1]
|
||||
|
||||
require Logger
|
||||
require Pleroma.Constants
|
||||
|
@ -109,18 +110,24 @@ defmodule Pleroma.Web.ActivityPub.Utils do
|
|||
end
|
||||
end
|
||||
|
||||
def make_json_ld_header do
|
||||
def make_json_ld_header(data \\ %{}) do
|
||||
%{
|
||||
"@context" => [
|
||||
"https://www.w3.org/ns/activitystreams",
|
||||
"#{Endpoint.url()}/schemas/litepub-0.1.jsonld",
|
||||
%{
|
||||
"@language" => "und"
|
||||
"@language" => get_language(data)
|
||||
}
|
||||
]
|
||||
}
|
||||
end
|
||||
|
||||
defp get_language(%{"language" => language}) when not_empty_string(language) do
|
||||
language
|
||||
end
|
||||
|
||||
defp get_language(_), do: "und"
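
With the two helpers above, the "@language" entry of the JSON-LD @context now reflects the object's stored language when one is set. Illustrative output only; the litepub schema URL depends on the instance's Endpoint.url():

    Pleroma.Web.ActivityPub.Utils.make_json_ld_header(%{"language" => "pl"})
    # => %{
    #      "@context" => [
    #        "https://www.w3.org/ns/activitystreams",
    #        "https://<instance>/schemas/litepub-0.1.jsonld",
    #        %{"@language" => "pl"}
    #      ]
    #    }

    Pleroma.Web.ActivityPub.Utils.make_json_ld_header(%{})
    # => same structure, falling back to %{"@language" => "und"} as before
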
|
||||
|
||||
def make_date do
|
||||
DateTime.utc_now() |> DateTime.to_iso8601()
|
||||
end
|
||||
|
|
|
@ -9,7 +9,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectView do
|
|||
alias Pleroma.Web.ActivityPub.Transmogrifier
|
||||
|
||||
def render("object.json", %{object: %Object{} = object}) do
|
||||
base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header()
|
||||
base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header(object.data)
|
||||
|
||||
additional = Transmogrifier.prepare_object(object.data)
|
||||
Map.merge(base, additional)
|
||||
|
@ -17,7 +17,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectView do
|
|||
|
||||
def render("object.json", %{object: %Activity{data: %{"type" => activity_type}} = activity})
|
||||
when activity_type in ["Create", "Listen"] do
|
||||
base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header()
|
||||
base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header(activity.data)
|
||||
object = Object.normalize(activity, fetch: false)
|
||||
|
||||
additional =
|
||||
|
@ -28,7 +28,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectView do
|
|||
end
|
||||
|
||||
def render("object.json", %{object: %Activity{} = activity}) do
|
||||
base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header()
|
||||
base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header(activity.data)
|
||||
object_id = Object.normalize(activity, id_only: true)
|
||||
|
||||
additional =
|
||||
|
|
|
@ -127,7 +127,8 @@ defmodule Pleroma.Web.ActivityPub.UserView do
|
|||
"capabilities" => capabilities,
|
||||
"alsoKnownAs" => user.also_known_as,
|
||||
"vcard:bday" => birthday,
|
||||
"webfinger" => "acct:#{User.full_nickname(user)}"
|
||||
"webfinger" => "acct:#{User.full_nickname(user)}",
|
||||
"published" => Pleroma.Web.CommonAPI.Utils.to_masto_date(user.inserted_at)
|
||||
}
|
||||
|> Map.merge(
|
||||
maybe_make_image(
|
||||
|
|
|
@ -139,7 +139,8 @@ defmodule Pleroma.Web.ApiSpec do
|
|||
"Search",
|
||||
"Status actions",
|
||||
"Media attachments",
|
||||
"Bookmark folders"
|
||||
"Bookmark folders",
|
||||
"Tags"
|
||||
]
|
||||
},
|
||||
%{
|
||||
|
|
103
lib/pleroma/web/api_spec/operations/tag_operation.ex
Normal file
|
@ -0,0 +1,103 @@
|
|||
defmodule Pleroma.Web.ApiSpec.TagOperation do
|
||||
alias OpenApiSpex.Operation
|
||||
alias OpenApiSpex.Schema
|
||||
alias Pleroma.Web.ApiSpec.Schemas.ApiError
|
||||
alias Pleroma.Web.ApiSpec.Schemas.Tag
|
||||
|
||||
def open_api_operation(action) do
|
||||
operation = String.to_existing_atom("#{action}_operation")
|
||||
apply(__MODULE__, operation, [])
|
||||
end
|
||||
|
||||
def show_operation do
|
||||
%Operation{
|
||||
tags: ["Tags"],
|
||||
summary: "Hashtag",
|
||||
description: "View a hashtag",
|
||||
security: [%{"oAuth" => ["read"]}],
|
||||
parameters: [id_param()],
|
||||
operationId: "TagController.show",
|
||||
responses: %{
|
||||
200 => Operation.response("Hashtag", "application/json", Tag),
|
||||
404 => Operation.response("Not Found", "application/json", ApiError)
|
||||
}
|
||||
}
|
||||
end
|
||||
|
||||
def follow_operation do
|
||||
%Operation{
|
||||
tags: ["Tags"],
|
||||
summary: "Follow a hashtag",
|
||||
description: "Follow a hashtag",
|
||||
security: [%{"oAuth" => ["write:follows"]}],
|
||||
parameters: [id_param()],
|
||||
operationId: "TagController.follow",
|
||||
responses: %{
|
||||
200 => Operation.response("Hashtag", "application/json", Tag),
|
||||
404 => Operation.response("Not Found", "application/json", ApiError)
|
||||
}
|
||||
}
|
||||
end
|
||||
|
||||
def unfollow_operation do
|
||||
%Operation{
|
||||
tags: ["Tags"],
|
||||
summary: "Unfollow a hashtag",
|
||||
description: "Unfollow a hashtag",
|
||||
security: [%{"oAuth" => ["write:follows"]}],
|
||||
parameters: [id_param()],
|
||||
operationId: "TagController.unfollow",
|
||||
responses: %{
|
||||
200 => Operation.response("Hashtag", "application/json", Tag),
|
||||
404 => Operation.response("Not Found", "application/json", ApiError)
|
||||
}
|
||||
}
|
||||
end
|
||||
|
||||
def show_followed_operation do
|
||||
%Operation{
|
||||
tags: ["Tags"],
|
||||
summary: "Followed hashtags",
|
||||
description: "View a list of hashtags the currently authenticated user is following",
|
||||
parameters: pagination_params(),
|
||||
security: [%{"oAuth" => ["read:follows"]}],
|
||||
operationId: "TagController.show_followed",
|
||||
responses: %{
|
||||
200 =>
|
||||
Operation.response("Hashtags", "application/json", %Schema{
|
||||
type: :array,
|
||||
items: Tag
|
||||
}),
|
||||
403 => Operation.response("Forbidden", "application/json", ApiError),
|
||||
404 => Operation.response("Not Found", "application/json", ApiError)
|
||||
}
|
||||
}
|
||||
end
|
||||
|
||||
defp id_param do
|
||||
Operation.parameter(
|
||||
:id,
|
||||
:path,
|
||||
%Schema{type: :string},
|
||||
"Name of the hashtag"
|
||||
)
|
||||
end
|
||||
|
||||
def pagination_params do
|
||||
[
|
||||
Operation.parameter(:max_id, :query, :integer, "Return items older than this ID"),
|
||||
Operation.parameter(
|
||||
:min_id,
|
||||
:query,
|
||||
:integer,
|
||||
"Return the oldest items newer than this ID"
|
||||
),
|
||||
Operation.parameter(
|
||||
:limit,
|
||||
:query,
|
||||
%Schema{type: :integer, default: 20},
|
||||
"Maximum number of items to return. Will be ignored if it's more than 40"
|
||||
)
|
||||
]
|
||||
end
|
||||
end
|
|
@ -17,11 +17,22 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Tag do
|
|||
type: :string,
|
||||
format: :uri,
|
||||
description: "A link to the hashtag on the instance"
|
||||
},
|
||||
following: %Schema{
|
||||
type: :boolean,
|
||||
description: "Whether the authenticated user is following the hashtag"
|
||||
},
|
||||
history: %Schema{
|
||||
type: :array,
|
||||
items: %Schema{type: :string},
|
||||
description:
|
||||
"A list of historical uses of the hashtag (not implemented, for compatibility only)"
|
||||
}
|
||||
},
|
||||
example: %{
|
||||
name: "cofe",
|
||||
url: "https://lain.com/tag/cofe"
|
||||
url: "https://lain.com/tag/cofe",
|
||||
following: false
|
||||
}
|
||||
})
|
||||
end
|
||||
|
|
|
@ -11,6 +11,9 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do
|
|||
alias Pleroma.Web.CommonAPI
|
||||
alias Pleroma.Web.CommonAPI.Utils
|
||||
|
||||
import Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode,
|
||||
only: [good_locale_code?: 1]
|
||||
|
||||
import Pleroma.Web.Gettext
|
||||
import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1]
|
||||
|
||||
|
@ -38,6 +41,7 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do
|
|||
cc: [],
|
||||
context: nil,
|
||||
sensitive: false,
|
||||
language: nil,
|
||||
object: nil,
|
||||
preview?: false,
|
||||
changes: %{}
|
||||
|
@ -64,6 +68,7 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do
|
|||
|> content()
|
||||
|> with_valid(&to_and_cc/1)
|
||||
|> with_valid(&context/1)
|
||||
|> with_valid(&language/1)
|
||||
|> sensitive()
|
||||
|> with_valid(&object/1)
|
||||
|> preview?()
|
||||
|
@ -249,6 +254,16 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do
|
|||
%__MODULE__{draft | sensitive: sensitive}
|
||||
end
|
||||
|
||||
defp language(draft) do
|
||||
language = draft.params[:language]
|
||||
|
||||
if good_locale_code?(language) do
|
||||
%__MODULE__{draft | language: language}
|
||||
else
|
||||
draft
|
||||
end
|
||||
end
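
good_locale_code?/1 (imported from the LanguageCode Ecto type at the top of this file) is what gates the new language field. Judging by the validator tests added later in this diff, it accepts plain and region-qualified codes and rejects underscored or otherwise malformed ones; the values below are an illustration, not an exhaustive spec:

    good_locale_code?("pl")     # => true
    good_locale_code?("pl-PL")  # => true
    good_locale_code?("ru_RU")  # => false, so the draft keeps language: nil
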
|
||||
|
||||
defp object(draft) do
|
||||
emoji = Map.merge(Pleroma.Emoji.Formatter.get_emoji_map(draft.full_payload), draft.emoji)
|
||||
|
||||
|
@ -288,6 +303,7 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do
|
|||
"mediaType" => Utils.get_content_type(draft.params[:content_type])
|
||||
})
|
||||
|> Map.put("generator", draft.params[:generator])
|
||||
|> Map.put("language", draft.language)
|
||||
|
||||
%__MODULE__{draft | object: object}
|
||||
end
|
||||
|
|
77
lib/pleroma/web/mastodon_api/controllers/tag_controller.ex
Normal file
|
@ -0,0 +1,77 @@
|
|||
defmodule Pleroma.Web.MastodonAPI.TagController do
|
||||
@moduledoc "Hashtag routes for mastodon API"
|
||||
use Pleroma.Web, :controller
|
||||
|
||||
alias Pleroma.Hashtag
|
||||
alias Pleroma.Pagination
|
||||
alias Pleroma.User
|
||||
|
||||
import Pleroma.Web.ControllerHelper,
|
||||
only: [
|
||||
add_link_headers: 2
|
||||
]
|
||||
|
||||
plug(Pleroma.Web.ApiSpec.CastAndValidate)
|
||||
|
||||
plug(
|
||||
Pleroma.Web.Plugs.OAuthScopesPlug,
|
||||
%{scopes: ["read"]} when action in [:show]
|
||||
)
|
||||
|
||||
plug(
|
||||
Pleroma.Web.Plugs.OAuthScopesPlug,
|
||||
%{scopes: ["read:follows"]} when action in [:show_followed]
|
||||
)
|
||||
|
||||
plug(
|
||||
Pleroma.Web.Plugs.OAuthScopesPlug,
|
||||
%{scopes: ["write:follows"]} when action in [:follow, :unfollow]
|
||||
)
|
||||
|
||||
defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.TagOperation
|
||||
|
||||
def show(conn, %{id: id}) do
|
||||
with %Hashtag{} = hashtag <- Hashtag.get_by_name(id) do
|
||||
render(conn, "show.json", tag: hashtag, for_user: conn.assigns.user)
|
||||
else
|
||||
_ -> conn |> render_error(:not_found, "Hashtag not found")
|
||||
end
|
||||
end
|
||||
|
||||
def follow(conn, %{id: id}) do
|
||||
with %Hashtag{} = hashtag <- Hashtag.get_by_name(id),
|
||||
%User{} = user <- conn.assigns.user,
|
||||
{:ok, _} <-
|
||||
User.follow_hashtag(user, hashtag) do
|
||||
render(conn, "show.json", tag: hashtag, for_user: user)
|
||||
else
|
||||
_ -> render_error(conn, :not_found, "Hashtag not found")
|
||||
end
|
||||
end
|
||||
|
||||
def unfollow(conn, %{id: id}) do
|
||||
with %Hashtag{} = hashtag <- Hashtag.get_by_name(id),
|
||||
%User{} = user <- conn.assigns.user,
|
||||
{:ok, _} <-
|
||||
User.unfollow_hashtag(user, hashtag) do
|
||||
render(conn, "show.json", tag: hashtag, for_user: user)
|
||||
else
|
||||
_ -> render_error(conn, :not_found, "Hashtag not found")
|
||||
end
|
||||
end
|
||||
|
||||
def show_followed(conn, params) do
|
||||
with %{assigns: %{user: %User{} = user}} <- conn do
|
||||
params = Map.put(params, :id_type, :integer)
|
||||
|
||||
hashtags =
|
||||
user
|
||||
|> User.HashtagFollow.followed_hashtags_query()
|
||||
|> Pagination.fetch_paginated(params)
|
||||
|
||||
conn
|
||||
|> add_link_headers(hashtags)
|
||||
|> render("index.json", tags: hashtags, for_user: user)
|
||||
end
|
||||
end
|
||||
end
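
The controller above is a thin wrapper over Hashtag and User helpers, so the same flow can be exercised from IEx. A rough sketch (the nickname is made up; Hashtag.get_by_name/1 may return nil for a tag that has never been used, and error handling is omitted):

    user = Pleroma.User.get_by_nickname("lain")
    hashtag = Pleroma.Hashtag.get_by_name("cofe")
    {:ok, _} = Pleroma.User.follow_hashtag(user, hashtag)
    Pleroma.User.following_hashtag?(user, hashtag)    # => true
    {:ok, _} = Pleroma.User.unfollow_hashtag(user, hashtag)
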
|
|
@ -40,6 +40,11 @@ defmodule Pleroma.Web.MastodonAPI.TimelineController do
|
|||
|
||||
# GET /api/v1/timelines/home
|
||||
def home(%{assigns: %{user: user}} = conn, params) do
|
||||
followed_hashtags =
|
||||
user
|
||||
|> User.followed_hashtags()
|
||||
|> Enum.map(& &1.id)
|
||||
|
||||
params =
|
||||
params
|
||||
|> Map.put(:type, ["Create", "Announce"])
|
||||
|
@ -49,6 +54,7 @@ defmodule Pleroma.Web.MastodonAPI.TimelineController do
|
|||
|> Map.put(:announce_filtering_user, user)
|
||||
|> Map.put(:user, user)
|
||||
|> Map.put(:local_only, params[:local])
|
||||
|> Map.put(:followed_hashtags, followed_hashtags)
|
||||
|> Map.delete(:local)
|
||||
|
||||
activities =
|
||||
|
|
|
@ -227,7 +227,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
|
|||
mentions: mentions,
|
||||
tags: reblogged[:tags] || [],
|
||||
application: build_application(object.data["generator"]),
|
||||
language: nil,
|
||||
language: get_language(object),
|
||||
emojis: [],
|
||||
pleroma: %{
|
||||
local: activity.local,
|
||||
|
@ -445,7 +445,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
|
|||
mentions: mentions,
|
||||
tags: build_tags(tags),
|
||||
application: build_application(object.data["generator"]),
|
||||
language: nil,
|
||||
language: get_language(object),
|
||||
emojis: build_emojis(object.data["emoji"]),
|
||||
pleroma: %{
|
||||
local: activity.local,
|
||||
|
@ -829,6 +829,10 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
|
|||
Utils.get_content_type(nil)
|
||||
end
|
||||
|
||||
defp get_language(%{data: %{"language" => "und"}}), do: nil
|
||||
|
||||
defp get_language(object), do: object.data["language"]
|
||||
|
||||
defp proxied_url(url, page_url_data) do
|
||||
if is_binary(url) do
|
||||
build_image_url(URI.parse(url), page_url_data) |> MediaProxy.url()
|
||||
|
|
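
The get_language/1 helper added above maps the stored object language onto the Mastodon API "language" field, with "und" (undetermined) normalised to nil. Roughly, for the clauses shown:

    get_language(%{data: %{"language" => "pl"}})    # => "pl"
    get_language(%{data: %{"language" => "und"}})   # => nil
    get_language(%{data: %{}})                      # => nil
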
25
lib/pleroma/web/mastodon_api/views/tag_view.ex
Normal file
|
@ -0,0 +1,25 @@
|
|||
defmodule Pleroma.Web.MastodonAPI.TagView do
|
||||
use Pleroma.Web, :view
|
||||
alias Pleroma.User
|
||||
alias Pleroma.Web.Router.Helpers
|
||||
|
||||
def render("index.json", %{tags: tags, for_user: user}) do
|
||||
safe_render_many(tags, __MODULE__, "show.json", %{for_user: user})
|
||||
end
|
||||
|
||||
def render("show.json", %{tag: tag, for_user: user}) do
|
||||
following =
|
||||
with %User{} <- user do
|
||||
User.following_hashtag?(user, tag)
|
||||
else
|
||||
_ -> false
|
||||
end
|
||||
|
||||
%{
|
||||
name: tag.name,
|
||||
url: Helpers.tag_feed_url(Pleroma.Web.Endpoint, :feed, tag.name),
|
||||
history: [],
|
||||
following: following
|
||||
}
|
||||
end
|
||||
end
|
|
@ -71,11 +71,15 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyController do
|
|||
drop_static_param_and_redirect(conn)
|
||||
|
||||
content_type == "image/gif" ->
|
||||
redirect(conn, external: media_proxy_url)
|
||||
conn
|
||||
|> put_status(301)
|
||||
|> redirect(external: media_proxy_url)
|
||||
|
||||
min_content_length_for_preview() > 0 and content_length > 0 and
|
||||
content_length < min_content_length_for_preview() ->
|
||||
redirect(conn, external: media_proxy_url)
|
||||
conn
|
||||
|> put_status(301)
|
||||
|> redirect(external: media_proxy_url)
|
||||
|
||||
true ->
|
||||
handle_preview(content_type, conn, media_proxy_url)
|
||||
|
|
|
@ -78,10 +78,10 @@ defmodule Pleroma.Web.Metadata.Providers.OpenGraph do
|
|||
# object when a Video or GIF is attached it will display that in Whatsapp Rich Preview.
|
||||
case Utils.fetch_media_type(@media_types, url["mediaType"]) do
|
||||
"audio" ->
|
||||
[
|
||||
{:meta, [property: "og:audio", content: MediaProxy.url(url["href"])], []}
|
||||
| acc
|
||||
]
|
||||
acc ++
|
||||
[
|
||||
{:meta, [property: "og:audio", content: MediaProxy.url(url["href"])], []}
|
||||
]
|
||||
|
||||
# Not using preview_url for this. It saves bandwidth, but the image dimensions will
|
||||
# be wrong. We generate it on the fly and have no way to capture or analyze the
|
||||
|
@ -89,18 +89,18 @@ defmodule Pleroma.Web.Metadata.Providers.OpenGraph do
|
|||
# in timelines too, but you can get clever with the aspect ratio metadata as a
|
||||
# workaround.
|
||||
"image" ->
|
||||
[
|
||||
{:meta, [property: "og:image", content: MediaProxy.url(url["href"])], []},
|
||||
{:meta, [property: "og:image:alt", content: attachment["name"]], []}
|
||||
| acc
|
||||
]
|
||||
(acc ++
|
||||
[
|
||||
{:meta, [property: "og:image", content: MediaProxy.url(url["href"])], []},
|
||||
{:meta, [property: "og:image:alt", content: attachment["name"]], []}
|
||||
])
|
||||
|> maybe_add_dimensions(url)
|
||||
|
||||
"video" ->
|
||||
[
|
||||
{:meta, [property: "og:video", content: MediaProxy.url(url["href"])], []}
|
||||
| acc
|
||||
]
|
||||
(acc ++
|
||||
[
|
||||
{:meta, [property: "og:video", content: MediaProxy.url(url["href"])], []}
|
||||
])
|
||||
|> maybe_add_dimensions(url)
|
||||
|> maybe_add_video_thumbnail(url)
|
||||
|
||||
|
|
|
@ -61,13 +61,13 @@ defmodule Pleroma.Web.Metadata.Providers.TwitterCard do
|
|||
Enum.reduce(attachment["url"], [], fn url, acc ->
|
||||
case Utils.fetch_media_type(@media_types, url["mediaType"]) do
|
||||
"audio" ->
|
||||
[
|
||||
{:meta, [name: "twitter:card", content: "player"], []},
|
||||
{:meta, [name: "twitter:player:width", content: "480"], []},
|
||||
{:meta, [name: "twitter:player:height", content: "80"], []},
|
||||
{:meta, [name: "twitter:player", content: player_url(id)], []}
|
||||
| acc
|
||||
]
|
||||
acc ++
|
||||
[
|
||||
{:meta, [name: "twitter:card", content: "player"], []},
|
||||
{:meta, [name: "twitter:player:width", content: "480"], []},
|
||||
{:meta, [name: "twitter:player:height", content: "80"], []},
|
||||
{:meta, [name: "twitter:player", content: player_url(id)], []}
|
||||
]
|
||||
|
||||
# Not using preview_url for this. It saves bandwidth, but the image dimensions will
|
||||
# be wrong. We generate it on the fly and have no way to capture or analyze the
|
||||
|
@ -75,16 +75,16 @@ defmodule Pleroma.Web.Metadata.Providers.TwitterCard do
|
|||
# in timelines too, but you can get clever with the aspect ratio metadata as a
|
||||
# workaround.
|
||||
"image" ->
|
||||
[
  {:meta, [name: "twitter:card", content: "summary_large_image"], []},
  {:meta,
   [
     name: "twitter:image",
     content: MediaProxy.url(url["href"])
   ], []},
  {:meta, [name: "twitter:image:alt", content: truncate(attachment["name"])], []}
  | acc
]
(acc ++
   [
     {:meta, [name: "twitter:card", content: "summary_large_image"], []},
     {:meta,
      [
        name: "twitter:image",
        content: MediaProxy.url(url["href"])
      ], []},
     {:meta, [name: "twitter:image:alt", content: truncate(attachment["name"])], []}
   ])
|
||||
|> maybe_add_dimensions(url)
|
||||
|
||||
"video" ->
|
||||
|
@ -92,17 +92,17 @@ defmodule Pleroma.Web.Metadata.Providers.TwitterCard do
|
|||
height = url["height"] || 480
|
||||
width = url["width"] || 480
|
||||
|
||||
[
|
||||
{:meta, [name: "twitter:card", content: "player"], []},
|
||||
{:meta, [name: "twitter:player", content: player_url(id)], []},
|
||||
{:meta, [name: "twitter:player:width", content: "#{width}"], []},
|
||||
{:meta, [name: "twitter:player:height", content: "#{height}"], []},
|
||||
{:meta, [name: "twitter:player:stream", content: MediaProxy.url(url["href"])],
|
||||
[]},
|
||||
{:meta, [name: "twitter:player:stream:content_type", content: url["mediaType"]],
|
||||
[]}
|
||||
| acc
|
||||
]
|
||||
acc ++
|
||||
[
|
||||
{:meta, [name: "twitter:card", content: "player"], []},
|
||||
{:meta, [name: "twitter:player", content: player_url(id)], []},
|
||||
{:meta, [name: "twitter:player:width", content: "#{width}"], []},
|
||||
{:meta, [name: "twitter:player:height", content: "#{height}"], []},
|
||||
{:meta, [name: "twitter:player:stream", content: MediaProxy.url(url["href"])],
|
||||
[]},
|
||||
{:meta, [name: "twitter:player:stream:content_type", content: url["mediaType"]],
|
||||
[]}
|
||||
]
|
||||
|
||||
_ ->
|
||||
acc
|
||||
|
|
34
lib/pleroma/web/plugs/ap_client_api_enabled_plug.ex
Normal file
|
@ -0,0 +1,34 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Web.Plugs.APClientApiEnabledPlug do
|
||||
import Plug.Conn
|
||||
import Phoenix.Controller, only: [text: 2]
|
||||
|
||||
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
|
||||
@enabled_path [:activitypub, :client_api_enabled]
|
||||
|
||||
def init(options \\ []), do: Map.new(options)
|
||||
|
||||
def call(conn, %{allow_server: true}) do
|
||||
if @config_impl.get(@enabled_path, false) do
|
||||
conn
|
||||
else
|
||||
conn
|
||||
|> assign(:user, nil)
|
||||
|> assign(:token, nil)
|
||||
end
|
||||
end
|
||||
|
||||
def call(conn, _) do
|
||||
if @config_impl.get(@enabled_path, false) do
|
||||
conn
|
||||
else
|
||||
conn
|
||||
|> put_status(:forbidden)
|
||||
|> text("C2S not enabled")
|
||||
|> halt()
|
||||
end
|
||||
end
|
||||
end
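
The plug reads the [:activitypub, :client_api_enabled] setting and treats a missing value as disabled. A sketch of enabling the AP C2S API in config/config.exs (assumption: the other :activitypub keys keep their defaults):

    config :pleroma, :activitypub, client_api_enabled: true

With allow_server: true (as used for the shared S2S/C2S routes in the router changes further down), a disabled C2S API only drops the authenticated user and token from the connection instead of rejecting the request outright.
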
|
|
@ -19,8 +19,16 @@ defmodule Pleroma.Web.Plugs.HTTPSignaturePlug do
|
|||
options
|
||||
end
|
||||
|
||||
def call(%{assigns: %{valid_signature: true}} = conn, _opts) do
|
||||
conn
|
||||
def call(%{assigns: %{valid_signature: true}} = conn, _opts), do: conn
|
||||
|
||||
# skip for C2S requests from authenticated users
|
||||
def call(%{assigns: %{user: %Pleroma.User{}}} = conn, _opts) do
|
||||
if get_format(conn) in ["json", "activity+json"] do
|
||||
# ensure access token is provided for 2FA
|
||||
Pleroma.Web.Plugs.EnsureAuthenticatedPlug.call(conn, %{})
|
||||
else
|
||||
conn
|
||||
end
|
||||
end
|
||||
|
||||
def call(conn, _opts) do
|
||||
|
|
|
@ -54,7 +54,10 @@ defmodule Pleroma.Web.RichMedia.Card do
|
|||
|
||||
@spec get_by_url(String.t() | nil) :: t() | nil | :error
|
||||
def get_by_url(url) when is_binary(url) do
|
||||
if @config_impl.get([:rich_media, :enabled]) do
|
||||
host = URI.parse(url).host
|
||||
|
||||
with true <- @config_impl.get([:rich_media, :enabled]),
|
||||
true <- host not in @config_impl.get([:rich_media, :ignore_hosts], []) do
|
||||
url_hash = url_to_hash(url)
|
||||
|
||||
@cachex.fetch!(:rich_media_cache, url_hash, fn _ ->
|
||||
|
@ -69,7 +72,7 @@ defmodule Pleroma.Web.RichMedia.Card do
|
|||
end
|
||||
end)
|
||||
else
|
||||
:error
|
||||
false -> :error
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -77,7 +80,10 @@ defmodule Pleroma.Web.RichMedia.Card do
|
|||
|
||||
@spec get_or_backfill_by_url(String.t(), keyword()) :: t() | nil
|
||||
def get_or_backfill_by_url(url, opts \\ []) do
|
||||
if @config_impl.get([:rich_media, :enabled]) do
|
||||
host = URI.parse(url).host
|
||||
|
||||
with true <- @config_impl.get([:rich_media, :enabled]),
|
||||
true <- host not in @config_impl.get([:rich_media, :ignore_hosts], []) do
|
||||
case get_by_url(url) do
|
||||
%__MODULE__{} = card ->
|
||||
card
|
||||
|
@ -94,7 +100,7 @@ defmodule Pleroma.Web.RichMedia.Card do
|
|||
nil
|
||||
end
|
||||
else
|
||||
nil
|
||||
false -> nil
|
||||
end
|
||||
end
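
Both functions above now also consult ignore_hosts, so a URL whose host is listed is skipped before any cache lookup or fetch. The relevant configuration looks roughly like this (host names are placeholders):

    config :pleroma, :rich_media,
      enabled: true,
      ignore_hosts: ["example.com", "tracker.example.net"]
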
|
||||
|
||||
|
|
|
@ -9,7 +9,7 @@ defmodule Pleroma.Web.RichMedia.Parsers.MetaTagsParser do
|
|||
|> Enum.reduce(data, fn el, acc ->
|
||||
attributes = normalize_attributes(el, prefix, key_name, value_name)
|
||||
|
||||
Map.merge(acc, attributes)
|
||||
Map.merge(attributes, acc)
|
||||
end)
|
||||
|> maybe_put_title(html)
|
||||
end
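
The swapped argument order is the whole fix here: Map.merge/2 prefers values from its second argument on key conflicts, so merging the newly parsed attributes into the accumulator keeps the first occurrence of each tag rather than the last one parsed. For example:

    # the second map wins on conflict, so values already in `acc` are preserved
    Map.merge(%{"image" => "second.png"}, %{"image" => "first.png"})
    # => %{"image" => "first.png"}
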
|
||||
|
|
|
@ -11,5 +11,16 @@ defmodule Pleroma.Web.RichMedia.Parsers.TwitterCard do
|
|||
|> MetaTagsParser.parse(html, "og", "property")
|
||||
|> MetaTagsParser.parse(html, "twitter", "name")
|
||||
|> MetaTagsParser.parse(html, "twitter", "property")
|
||||
|> filter_tags()
|
||||
end
|
||||
|
||||
defp filter_tags(tags) do
|
||||
Map.filter(tags, fn {k, _v} ->
|
||||
cond do
|
||||
k in ["card", "description", "image", "title", "ttl", "type", "url"] -> true
|
||||
String.starts_with?(k, "image:") -> true
|
||||
true -> false
|
||||
end
|
||||
end)
|
||||
end
|
||||
end
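
filter_tags/1 above whitelists the handful of top-level TwitterCard/OpenGraph keys the card rendering actually uses, plus any "image:" sub-properties. An illustrative input and output (the function is private, so this only shows the filtering behaviour):

    filter_tags(%{
      "title" => "Fulmo",
      "image" => "/a.png",
      "image:width" => "1200",
      "site_name" => "Tiriftejo"
    })
    # => %{"title" => "Fulmo", "image" => "/a.png", "image:width" => "1200"}
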
|
||||
|
|
|
@ -755,6 +755,11 @@ defmodule Pleroma.Web.Router do
|
|||
|
||||
get("/announcements", AnnouncementController, :index)
|
||||
post("/announcements/:id/dismiss", AnnouncementController, :mark_read)
|
||||
|
||||
get("/tags/:id", TagController, :show)
|
||||
post("/tags/:id/follow", TagController, :follow)
|
||||
post("/tags/:id/unfollow", TagController, :unfollow)
|
||||
get("/followed_tags", TagController, :show_followed)
|
||||
end
|
||||
|
||||
scope "/api/v1", Pleroma.Web.MastodonAPI do
|
||||
|
@ -902,22 +907,37 @@ defmodule Pleroma.Web.Router do
|
|||
# Client to Server (C2S) AP interactions
|
||||
pipeline :activitypub_client do
|
||||
plug(:ap_service_actor)
|
||||
plug(Pleroma.Web.Plugs.APClientApiEnabledPlug)
|
||||
plug(:fetch_session)
|
||||
plug(:authenticate)
|
||||
plug(:after_auth)
|
||||
end
|
||||
|
||||
# AP interactions used by both S2S and C2S
|
||||
pipeline :activitypub_server_or_client do
|
||||
plug(:ap_service_actor)
|
||||
plug(:fetch_session)
|
||||
plug(:authenticate)
|
||||
plug(Pleroma.Web.Plugs.APClientApiEnabledPlug, allow_server: true)
|
||||
plug(:after_auth)
|
||||
plug(:http_signature)
|
||||
end
|
||||
|
||||
scope "/", Pleroma.Web.ActivityPub do
|
||||
pipe_through([:activitypub_client])
|
||||
|
||||
get("/api/ap/whoami", ActivityPubController, :whoami)
|
||||
get("/users/:nickname/inbox", ActivityPubController, :read_inbox)
|
||||
|
||||
get("/users/:nickname/outbox", ActivityPubController, :outbox)
|
||||
post("/users/:nickname/outbox", ActivityPubController, :update_outbox)
|
||||
post("/api/ap/upload_media", ActivityPubController, :upload_media)
|
||||
end
|
||||
|
||||
scope "/", Pleroma.Web.ActivityPub do
|
||||
pipe_through([:activitypub_server_or_client])
|
||||
|
||||
get("/users/:nickname/outbox", ActivityPubController, :outbox)
|
||||
|
||||
# The following two are S2S as well, see `ActivityPub.fetch_follow_information_for_user/1`:
|
||||
get("/users/:nickname/followers", ActivityPubController, :followers)
|
||||
get("/users/:nickname/following", ActivityPubController, :following)
|
||||
get("/users/:nickname/collections/featured", ActivityPubController, :pinned)
|
||||
|
|
|
@ -19,6 +19,7 @@ defmodule Pleroma.Web.Streamer do
|
|||
alias Pleroma.Web.OAuth.Token
|
||||
alias Pleroma.Web.Plugs.OAuthScopesPlug
|
||||
alias Pleroma.Web.StreamerView
|
||||
require Pleroma.Constants
|
||||
|
||||
@registry Pleroma.Web.StreamerRegistry
|
||||
|
||||
|
@ -305,7 +306,17 @@ defmodule Pleroma.Web.Streamer do
|
|||
User.get_recipients_from_activity(item)
|
||||
|> Enum.map(fn %{id: id} -> "user:#{id}" end)
|
||||
|
||||
Enum.each(recipient_topics, fn topic ->
|
||||
hashtag_recipients =
|
||||
if Pleroma.Constants.as_public() in item.recipients do
|
||||
Pleroma.Hashtag.get_recipients_for_activity(item)
|
||||
|> Enum.map(fn id -> "user:#{id}" end)
|
||||
else
|
||||
[]
|
||||
end
|
||||
|
||||
all_recipients = Enum.uniq(recipient_topics ++ hashtag_recipients)
|
||||
|
||||
Enum.each(all_recipients, fn topic ->
|
||||
push_to_socket(topic, item)
|
||||
end)
|
||||
end
|
||||
|
|
2
mix.exs
|
@ -4,7 +4,7 @@ defmodule Pleroma.Mixfile do
|
|||
def project do
|
||||
[
|
||||
app: :pleroma,
|
||||
version: version("2.8.0"),
|
||||
version: version("2.9.0"),
|
||||
elixir: "~> 1.14",
|
||||
elixirc_paths: elixirc_paths(Mix.env()),
|
||||
compilers: Mix.compilers(),
|
||||
|
|
|
@ -0,0 +1,14 @@
|
|||
defmodule Pleroma.Repo.Migrations.AddUserFollowsHashtag do
|
||||
use Ecto.Migration
|
||||
|
||||
def change do
|
||||
create table(:user_follows_hashtag) do
|
||||
add(:hashtag_id, references(:hashtags))
|
||||
add(:user_id, references(:users, type: :uuid, on_delete: :delete_all))
|
||||
end
|
||||
|
||||
create(unique_index(:user_follows_hashtag, [:user_id, :hashtag_id]))
|
||||
|
||||
create_if_not_exists(index(:user_follows_hashtag, [:hashtag_id]))
|
||||
end
|
||||
end
|
BIN
test/fixtures/break_analyze.png
vendored
Normal file
Binary file not shown (360 KiB).
151
test/fixtures/fulmo.html
vendored
Normal file
|
@ -0,0 +1,151 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang='eo'>
|
||||
<head>
|
||||
<meta charset='utf-8'/>
|
||||
<meta name='author' content='Tirifto'/>
|
||||
<meta name='generator' content='Pageling'/>
|
||||
<meta name='viewport' content='width=device-width,
|
||||
height=device-height,
|
||||
initial-scale=1.0'/>
|
||||
<link rel='stylesheet' type='text/css' href='/r/stiloj/tiriftejo.css'/>
|
||||
<link rel='alternate' type='application/atom+xml' href='/eo/novajhoj.atom'/>
|
||||
<link rel='icon' size='16x16' type='image/vnd.microsoft.icon' href='/favicon.ico'/>
|
||||
<link rel='icon' size='128x128' type='image/png' href='/icon.png'/>
|
||||
<link rel='alternate' hreflang='eo' href='https://tirifto.xwx.moe/eo/rakontoj/fulmo.html'/>
|
||||
<title>Fulmo</title>
|
||||
<meta property='og:title' content='Fulmo'/>
|
||||
<meta property='og:type' content='website'/>
|
||||
<meta property='og:url' content='https://tirifto.xwx.moe/eo/rakontoj/fulmo.html'/>
|
||||
<meta property='og:site_name' content='Tiriftejo'/>
|
||||
<meta property='og:locale' content='eo'/>
|
||||
<meta property='og:description' content='Pri feoj, kiuj devis ordigi falintan arbon.'/>
|
||||
<meta property='og:image' content='https://tirifto.xwx.moe/r/ilustrajhoj/pinglordigado.png'/>
|
||||
<meta property='og:image:alt' content='Meze de arbaro kuŝas falinta trunko, sen pingloj kaj kun branĉoj derompitaj. Post ĝi videblas du feoj: florofeo maldekstre kaj nubofeo dekstre. La florofeo iom kaŝas sin post la trunko. La nubofeo staras kaj tenas amason da pigloj. Ili iom rigardas al si.'/>
|
||||
<meta property='og:image:height' content='630'/>
|
||||
<meta property='og:image:width' content='1200'/>
|
||||
<meta property='og:image' content='https://tirifto.xwx.moe/r/opengraph/eo.png'/>
|
||||
<meta property='og:image:alt' content='La tirifta okulo ĉirkaŭita de ornamaj steloj kaj la teksto: »Tiriftejo. Esperanto.«'/>
|
||||
<meta property='og:image:height' content='630'/>
|
||||
<meta property='og:image:width' content='1200'/>
|
||||
</head>
|
||||
<body>
|
||||
<header id='website-header'>
|
||||
<nav id='website-navigation'>
|
||||
<input type='checkbox' id='website-navigation-toggle'
|
||||
aria-description='Montri ligilojn al ĉefaj paĝoj de la retejo.'/>
|
||||
<label for='website-navigation-toggle'>Paĝoj</label>
|
||||
<a href='/eo/verkoj.html'>Verkoj</a>
|
||||
<a href='/eo/novajhoj.html'>Novaĵoj</a>
|
||||
<a href='/eo/donacoj.html'>Donacoj</a>
|
||||
<a href='/eo/prio.html'>Prio</a>
|
||||
<a href='/eo/amikoj.html'>Amikoj</a>
|
||||
<a href='/eo/kontakto.html'>Kontakto</a>
|
||||
|
||||
</nav>
|
||||
<span id='eye' role='img' aria-label=''></span>
|
||||
<nav id='language-switcher'
|
||||
aria-roledescription='lingvo-ŝanĝilo'>
|
||||
<input type='checkbox' id='language-switcher-toggle'
|
||||
aria-description='Montri ligilojn al tradukoj de tiu ĉi paĝo.'/>
|
||||
<label for='language-switcher-toggle'>Lingvoj</label>
|
||||
<a href='fulmo.html' lang='eo' hreflang='eo'><img aria-hidden='true' alt='' src='/r/flagoj/eo.png'/>Esperanto</a>
|
||||
</nav>
|
||||
</header>
|
||||
<div class='bodier'>
|
||||
<nav id='work-links'>
|
||||
<a href='.'>Ceteraj rakontoj</a>
|
||||
<a href='../bildosignoj'>Bildosignoj</a>
|
||||
<a href='../eseoj'>Eseoj</a>
|
||||
<a href='../ludoj'>Ludoj</a>
|
||||
<a href='../poemoj'>Poemoj</a>
|
||||
<a href='../vortaroj'>Vortaroj</a>
|
||||
</nav>
|
||||
<main>
|
||||
<article>
|
||||
<header>
|
||||
<h1>Fulmo</h1>
|
||||
<p>Skribis Tirifto</p>
|
||||
<time datetime='2025-01-31'>2025-01-31</time>
|
||||
</header>
|
||||
<p>»Kial ĉiam mi? Tio ne justas! Oni kulpigas min, sed ja ne mi kulpas!« La nubofeo lamentis, dum ĝi ordigis restaĵojn de falinta arbo. Plejparto el la pingloj estis brulintaj, kaj el la trunko ankoraŭ leviĝis fumo.</p>
|
||||
<p>Subite aŭdeblis ekstraj kraketoj deapude. Ĝi rigardis flanken, kaj vidis iun kaŭri apud la arbo, derompi branĉetojn, kaj orde ilin amasigi. Ŝajnis, ke ekde sia rimarkiĝo, la nekonatulo laŭeble kuntiriĝis, kaj strebis labori kiel eble plej silente.</p>
|
||||
<p>»Saluton…?« La nubofeo stariĝis, alporolante la eston. Tiu kvazaŭ frostiĝis, sed timeme ankaŭ stariĝis.</p>
|
||||
<p>»S- Saluton…« Ĝi respondis sen kuraĝo rigardi ĝiadirekten. Nun stare, videblis ke ĝi estas verdanta florofeo.</p>
|
||||
<p>»… kion vi faras tie ĉi?« La nubofeo demandis.</p>
|
||||
<p>»Nu… tiel kaj tiel… mi ordigas.«</p>
|
||||
<p>»Ho. Mi ricevis taskon ordigi ĉi tie… se vi povas atendi, vi ne bezonas peni!«</p>
|
||||
<p>»N- Nu… mi tamen volus…« Parolis la florofeo, plu deturnante la kapon.</p>
|
||||
<p>»Nu… bone, se vi tion deziras… dankon!« La nubofeo dankis, kaj returniĝis al sia laboro.</p>
|
||||
<p>Fojfoje ĝi scivole rigardis al sia nova kunlaboranto, kaj fojfoje renkontis similan rigardon de ĝia flanko, kiuokaze ambaŭ rigardoj rapide revenis al la ordigataj pingloj kaj branĉetoj. »(Kial tiom volonte helpi min?)« Pensis al si la nubofeo. »(Ĉu ĝi simple tiom bonkoras? Ĝi ja tre bele floras; eble ankaŭ ĝia koro tiel same belas…)« Kaj vere, ĝiaj surfloroj grandanime malfermis siajn belkolorajn folietojn, kaj bonodoris al mondo.</p>
|
||||
<figure>
|
||||
<picture>
|
||||
<source srcset='/r/ilustrajhoj/pinglordigado.jxl' type='image/jxl'/>
|
||||
<img src='/r/ilustrajhoj/pinglordigado.png' alt='Meze de arbaro kuŝas falinta trunko, sen pingloj kaj kun branĉoj derompitaj. Post ĝi videblas du feoj: florofeo maldekstre kaj nubofeo dekstre. La florofeo iom kaŝas sin post la trunko. La nubofeo staras kaj tenas amason da pigloj. Ili iom rigardas al si.'/>
|
||||
</picture>
|
||||
<figcaption>
|
||||
Pinglordigado
|
||||
<details>
|
||||
<summary>© <time datetime='2025'>2025</time> Tirifto</summary>
|
||||
<a href='https://artlibre.org/'><img src='/r/permesiloj/lal.svg' class='stamp licence' alt='Emblemo: Permesilo de arto libera'/></a>
|
||||
</details>
|
||||
</figcaption>
|
||||
</figure>
|
||||
<p>Post iom da tempo, ĉiu feo tralaboris ĝis la trunkomezo, kaj proksimiĝis al la alia feo. Kaj tiam ekpezis sur ili devosento rompi la silenton.</p>
|
||||
<p>»… kia bela vetero, ĉu ne?« Diris la nubofeo, tuj rimarkonte, ke mallumiĝas, kaj la ĉielo restas kovrita de nuboj.</p>
|
||||
<p>»Jes ja! Tre nube. Mi ŝatas nubojn!« Respondis la alia entuziasme, sed tuj haltetis kaj deturnis la kapon. Ambaŭ feoj daŭrigis laboron silente, kaj plu proksimiĝis, ĝis tiu preskaŭ estis finita.</p>
|
||||
<p>»H… H… Ho ne…!« Subite ekdiris la nubofeo urĝe.</p>
|
||||
<p>»Kio okazas?!«</p>
|
||||
<p>»T… Tern…!«</p>
|
||||
<p>»Jen! Tenu!« La florofeo etendis manon kun granda folio. La nubofeo ĝin prenis, kaj tien ternis. Aperis ekfulmo, kaj la cindriĝinta folio disfalis.</p>
|
||||
<p>»Pardonu… mi ne volis…« Bedaŭris la nubofeo. »Mi ne scias, kial tio ĉiam okazas! Tiom plaĉas al mi promeni tere, sed ĉiuj diras, ke mi maldevus, ĉar ial ĝi ĉiam finiĝas tiel ĉi.« Ĝi montris al la arbo. »Eble ili pravas…«</p>
|
||||
<p>»Nu…« diris la florofeo bedaŭre, kaj etendis la manon.</p>
|
||||
<p>»H… H… Ne ree…!«</p>
|
||||
<p>Ekfulmis. Alia ĵus metita folio cindriĝis en la manoj de la florofeo, time ferminta la okulojn.</p>
|
||||
<p>»Dankegon… mi tre ŝatas vian helpon! Kaj mi ne… ne…«</p>
|
||||
<p>Metiĝis. Ekfulmis. Cindriĝis.</p>
|
||||
<p>»Io tre iritas mian nazon!« Plendis la nubofeo. Poste ĝi rimarkis la florpolvon, kiu disŝutiĝis el la florofeo en la tutan ĉirkaŭaĵon, kaj eĉ tuj antaŭ la nubofeon.</p>
|
||||
<p>»N- Nu…« Diris la florofeo, honte rigardanta la teron. »… pardonu.«</p>
|
||||
<footer>
|
||||
<noscript><p>Ĉu vi ŝatas la verkon? <a href='/eo/donacoj.html'>Subtenu min</a> aŭ kopiu adreson de la verko por diskonigi ĝin!</p></noscript>
|
||||
<script id='underbuttons'>
|
||||
/* @license magnet:?xt=urn:btih:90dc5c0be029de84e523b9b3922520e79e0e6f08&dn=cc0.txt CC0-1.0 */
|
||||
document.getElementById('underbuttons').outerHTML = "<p><a href='/eo/donacoj.html' class='button' target='_blank'>Subtenu min</a> <button onclick='navigator.clipboard.writeText(window.location.href.split(\"\#\")[0]).then(() => window.alert(\"Ligilo al ĉi tiu verko estas kopiita. Sendu ĝin al iu por diskonigi la verkon! 🐱\"))'>Diskonigu la verkon</button></p>"
|
||||
/* @license-end */
|
||||
</script>
|
||||
<details class='history'>
|
||||
<summary>Historio</summary>
|
||||
<dl>
|
||||
<dt><time datetime='2025-01-31'>2025-01-31</time></dt>
|
||||
<dd>Unua publikigo.</dd>
|
||||
</dl>
|
||||
</details>
|
||||
<details class='licence' open='details'>
|
||||
<summary>Permesilo</summary>
|
||||
<p>Ĉi tiun verkon vi rajtas libere kopii, disdoni, kaj ŝanĝi, laŭ kondiĉoj de la <a href='https://artlibre.org/'>Permesilo de arto libera</a>. (Resume: Vi devas mencii la aŭtoron kaj doni ligilon al la verko. Se vi ŝanĝas la verkon, vi devas laŭeble noti la faritajn ŝanĝojn, ilian daton, kaj eldoni ilin sub la sama permesilo.)</p>
|
||||
<a href='https://artlibre.org/'><img src='/r/permesiloj/lal.svg' class='stamp licence' alt='Emblemo: Permesilo de arto libera'/></a>
|
||||
</details>
|
||||
</footer>
|
||||
</article>
|
||||
</main>
|
||||
</div>
|
||||
<footer id='website-footer'>
|
||||
<div class='stamps'>
|
||||
<a href='https://gnu.org/'>
|
||||
<img class='stamp' src='/r/retetikedoj/gnu.png' lang='en' alt='GNU'/></a>
|
||||
<img class='stamp' src='/r/retetikedoj/ihhtus.png' lang='el' alt='ΙΧΘΥΣ'/>
|
||||
<img class='stamp' src='/r/retetikedoj/be-kind.apng' lang='en' alt='Be kind.'/>
|
||||
<img class='stamp' src='/r/retetikedoj/kulturo-libera.png' lang='eo' alt='Kulturo libera.'/>
|
||||
<img class='stamp' src='/r/retetikedoj/discord.png' lang='en' alt='Say ‘no’ to Discord.'/>
|
||||
<a href='https://xwx.moe/'>
|
||||
<img class='stamp' src='/r/retetikedoj/xwx-moe.png' alt='xwx.moe'/></a>
|
||||
<a href='https://mojeek.co.uk' hreflang='en'>
|
||||
<img class='stamp' src='/r/retetikedoj/mojeek.png' lang='en' alt='Mojeek'/></a>
|
||||
<a href='https://raku.org/' hreflang='en'>
|
||||
<img class='stamp' src='/r/retetikedoj/raku.png' alt='Raku'/></a>
|
||||
<picture>
|
||||
<source srcset='/r/retetikedoj/jxl.jxl' type='image/jxl'/>
|
||||
<img class='stamp' src='/r/retetikedoj/jxl.png' alt='JPEG XL'/></picture>
|
||||
</div>
|
||||
</footer>
|
||||
</body>
|
||||
</html>
|
90
test/fixtures/mastodon-update-with-likes.json
vendored
Normal file
|
@ -0,0 +1,90 @@
|
|||
{
|
||||
"@context": [
|
||||
"https://www.w3.org/ns/activitystreams",
|
||||
{
|
||||
"atomUri": "ostatus:atomUri",
|
||||
"conversation": "ostatus:conversation",
|
||||
"inReplyToAtomUri": "ostatus:inReplyToAtomUri",
|
||||
"ostatus": "http://ostatus.org#",
|
||||
"sensitive": "as:sensitive",
|
||||
"toot": "http://joinmastodon.org/ns#",
|
||||
"votersCount": "toot:votersCount"
|
||||
},
|
||||
"https://w3id.org/security/v1"
|
||||
],
|
||||
"actor": "https://pol.social/users/mkljczk",
|
||||
"cc": ["https://www.w3.org/ns/activitystreams#Public",
|
||||
"https://pol.social/users/aemstuz", "https://gts.mkljczk.pl/users/mkljczk",
|
||||
"https://pl.fediverse.pl/users/mkljczk",
|
||||
"https://fedi.kutno.pl/users/mkljczk"],
|
||||
"id": "https://pol.social/users/mkljczk/statuses/113907871635572263#updates/1738096776",
|
||||
"object": {
|
||||
"atomUri": "https://pol.social/users/mkljczk/statuses/113907871635572263",
|
||||
"attachment": [],
|
||||
"attributedTo": "https://pol.social/users/mkljczk",
|
||||
"cc": ["https://www.w3.org/ns/activitystreams#Public",
|
||||
"https://pol.social/users/aemstuz", "https://gts.mkljczk.pl/users/mkljczk",
|
||||
"https://pl.fediverse.pl/users/mkljczk",
|
||||
"https://fedi.kutno.pl/users/mkljczk"],
|
||||
"content": "<p>test</p>",
|
||||
"contentMap": {
|
||||
"pl": "<p>test</p>"
|
||||
},
|
||||
"conversation": "https://fedi.kutno.pl/contexts/43c14c70-d3fb-42b4-a36d-4eacfab9695a",
|
||||
"id": "https://pol.social/users/mkljczk/statuses/113907871635572263",
|
||||
"inReplyTo": "https://pol.social/users/aemstuz/statuses/113907854282654767",
|
||||
"inReplyToAtomUri": "https://pol.social/users/aemstuz/statuses/113907854282654767",
|
||||
"likes": {
|
||||
"id": "https://pol.social/users/mkljczk/statuses/113907871635572263/likes",
|
||||
"totalItems": 1,
|
||||
"type": "Collection"
|
||||
},
|
||||
"published": "2025-01-28T20:29:45Z",
|
||||
"replies": {
|
||||
"first": {
|
||||
"items": [],
|
||||
"next": "https://pol.social/users/mkljczk/statuses/113907871635572263/replies?only_other_accounts=true&page=true",
|
||||
"partOf": "https://pol.social/users/mkljczk/statuses/113907871635572263/replies",
|
||||
"type": "CollectionPage"
|
||||
},
|
||||
"id": "https://pol.social/users/mkljczk/statuses/113907871635572263/replies",
|
||||
"type": "Collection"
|
||||
},
|
||||
"sensitive": false,
|
||||
"shares": {
|
||||
"id": "https://pol.social/users/mkljczk/statuses/113907871635572263/shares",
|
||||
"totalItems": 0,
|
||||
"type": "Collection"
|
||||
},
|
||||
"summary": null,
|
||||
"tag": [
|
||||
{
|
||||
"href": "https://pol.social/users/aemstuz",
|
||||
"name": "@aemstuz",
|
||||
"type": "Mention"
|
||||
},
|
||||
{
|
||||
"href": "https://gts.mkljczk.pl/users/mkljczk",
|
||||
"name": "@mkljczk@gts.mkljczk.pl",
|
||||
"type": "Mention"
|
||||
},
|
||||
{
|
||||
"href": "https://pl.fediverse.pl/users/mkljczk",
|
||||
"name": "@mkljczk@fediverse.pl",
|
||||
"type": "Mention"
|
||||
},
|
||||
{
|
||||
"href": "https://fedi.kutno.pl/users/mkljczk",
|
||||
"name": "@mkljczk@fedi.kutno.pl",
|
||||
"type": "Mention"
|
||||
}
|
||||
],
|
||||
"to": ["https://pol.social/users/mkljczk/followers"],
|
||||
"type": "Note",
|
||||
"updated": "2025-01-28T20:39:36Z",
|
||||
"url": "https://pol.social/@mkljczk/113907871635572263"
|
||||
},
|
||||
"published": "2025-01-28T20:39:36Z",
|
||||
"to": ["https://pol.social/users/mkljczk/followers"],
|
||||
"type": "Update"
|
||||
}
|
|
@ -411,7 +411,7 @@ defmodule Mix.Tasks.Pleroma.DatabaseTest do
|
|||
["scheduled_activities"],
|
||||
["schema_migrations"],
|
||||
["thread_mutes"],
|
||||
# ["user_follows_hashtag"], # not in pleroma
|
||||
["user_follows_hashtag"],
|
||||
# ["user_frontend_setting_profiles"], # not in pleroma
|
||||
["user_invite_tokens"],
|
||||
["user_notes"],
|
||||
|
|
|
@ -0,0 +1,56 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.ContentLanguageMapTest do
|
||||
use Pleroma.DataCase, async: true
|
||||
|
||||
alias Pleroma.EctoType.ActivityPub.ObjectValidators.ContentLanguageMap
|
||||
|
||||
test "it validates" do
|
||||
data = %{
|
||||
"en-US" => "mew mew",
|
||||
"en-GB" => "meow meow"
|
||||
}
|
||||
|
||||
assert {:ok, ^data} = ContentLanguageMap.cast(data)
|
||||
end
|
||||
|
||||
test "it validates empty strings" do
|
||||
data = %{
|
||||
"en-US" => "mew mew",
|
||||
"en-GB" => ""
|
||||
}
|
||||
|
||||
assert {:ok, ^data} = ContentLanguageMap.cast(data)
|
||||
end
|
||||
|
||||
test "it ignores non-strings within the map" do
|
||||
data = %{
|
||||
"en-US" => "mew mew",
|
||||
"en-GB" => 123
|
||||
}
|
||||
|
||||
assert {:ok, validated_data} = ContentLanguageMap.cast(data)
|
||||
|
||||
assert validated_data == %{"en-US" => "mew mew"}
|
||||
end
|
||||
|
||||
test "it ignores bad locale codes" do
|
||||
data = %{
|
||||
"en-US" => "mew mew",
|
||||
"en_GB" => "meow meow",
|
||||
"en<<#@!$#!@%!GB" => "meow meow"
|
||||
}
|
||||
|
||||
assert {:ok, validated_data} = ContentLanguageMap.cast(data)
|
||||
|
||||
assert validated_data == %{"en-US" => "mew mew"}
|
||||
end
|
||||
|
||||
test "it complains with non-map data" do
|
||||
assert :error = ContentLanguageMap.cast("mew")
|
||||
assert :error = ContentLanguageMap.cast(["mew"])
|
||||
assert :error = ContentLanguageMap.cast([%{"en-US" => "mew"}])
|
||||
end
|
||||
end
|
|
@ -0,0 +1,29 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCodeTest do
|
||||
use Pleroma.DataCase, async: true
|
||||
|
||||
alias Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode
|
||||
|
||||
test "it accepts language code" do
|
||||
text = "pl"
|
||||
assert {:ok, ^text} = LanguageCode.cast(text)
|
||||
end
|
||||
|
||||
test "it accepts language code with region" do
|
||||
text = "pl-PL"
|
||||
assert {:ok, ^text} = LanguageCode.cast(text)
|
||||
end
|
||||
|
||||
test "errors for invalid language code" do
|
||||
assert {:error, :invalid_language} = LanguageCode.cast("ru_RU")
|
||||
assert {:error, :invalid_language} = LanguageCode.cast(" ")
|
||||
assert {:error, :invalid_language} = LanguageCode.cast("en-US\n")
|
||||
end
|
||||
|
||||
test "errors for non-text" do
|
||||
assert :error == LanguageCode.cast(42)
|
||||
end
|
||||
end
|
|
@ -4,6 +4,7 @@
|
|||
|
||||
defmodule Pleroma.Emoji.PackTest do
|
||||
use Pleroma.DataCase
|
||||
alias Pleroma.Emoji
|
||||
alias Pleroma.Emoji.Pack
|
||||
|
||||
@emoji_path Path.join(
|
||||
|
@ -53,6 +54,63 @@ defmodule Pleroma.Emoji.PackTest do
|
|||
|
||||
assert updated_pack.files_count == 5
|
||||
end
|
||||
|
||||
test "skips existing emojis when adding from zip file", %{pack: pack} do
|
||||
# First, let's create a test pack with a "bear" emoji
|
||||
test_pack_path = Path.join(@emoji_path, "test_bear_pack")
|
||||
File.mkdir_p(test_pack_path)
|
||||
|
||||
# Create a pack.json file
|
||||
File.write!(Path.join(test_pack_path, "pack.json"), """
|
||||
{
|
||||
"files": { "bear": "bear.png" },
|
||||
"pack": {
|
||||
"description": "Bear Pack", "homepage": "https://pleroma.social",
|
||||
"license": "Test license", "share-files": true
|
||||
}}
|
||||
""")
|
||||
|
||||
# Copy a test image to use as the bear emoji
|
||||
File.cp!(
|
||||
Path.absname("test/instance_static/emoji/test_pack/blank.png"),
|
||||
Path.join(test_pack_path, "bear.png")
|
||||
)
|
||||
|
||||
# Load the pack to register the "bear" emoji in the global registry
|
||||
{:ok, _bear_pack} = Pleroma.Emoji.Pack.load_pack("test_bear_pack")
|
||||
|
||||
# Reload emoji to make sure the bear emoji is in the global registry
|
||||
Emoji.reload()
|
||||
|
||||
# Verify that the bear emoji exists in the global registry
|
||||
assert Emoji.exist?("bear")
|
||||
|
||||
# Now try to add a zip file that contains an emoji with the same shortcode
|
||||
file = %Plug.Upload{
|
||||
content_type: "application/zip",
|
||||
filename: "emojis.zip",
|
||||
path: Path.absname("test/fixtures/emojis.zip")
|
||||
}
|
||||
|
||||
{:ok, updated_pack} = Pack.add_file(pack, nil, nil, file)
|
||||
|
||||
# Verify that the "bear" emoji was skipped
|
||||
refute Map.has_key?(updated_pack.files, "bear")
|
||||
|
||||
# Other emojis should be added
|
||||
assert Map.has_key?(updated_pack.files, "a_trusted_friend-128")
|
||||
assert Map.has_key?(updated_pack.files, "auroraborealis")
|
||||
assert Map.has_key?(updated_pack.files, "baby_in_a_box")
|
||||
assert Map.has_key?(updated_pack.files, "bear-128")
|
||||
|
||||
# Total count should be 4 (all emojis except "bear")
|
||||
assert updated_pack.files_count == 4
|
||||
|
||||
# Clean up the test pack
|
||||
on_exit(fn ->
|
||||
File.rm_rf!(test_pack_path)
|
||||
end)
|
||||
end
|
||||
end
|
||||
|
||||
test "returns error when zip file is bad", %{pack: pack} do
|
||||
|
|
499
test/pleroma/safe_zip_test.exs
Normal file
|
@ -0,0 +1,499 @@
|
|||
defmodule Pleroma.SafeZipTest do
|
||||
# Not making this async because it creates and deletes files
|
||||
use ExUnit.Case
|
||||
|
||||
alias Pleroma.SafeZip
|
||||
|
||||
@fixtures_dir "test/fixtures"
|
||||
@tmp_dir "test/zip_tmp"
|
||||
|
||||
setup do
|
||||
# Ensure tmp directory exists
|
||||
File.mkdir_p!(@tmp_dir)
|
||||
|
||||
on_exit(fn ->
|
||||
# Clean up any files created during tests
|
||||
File.rm_rf!(@tmp_dir)
|
||||
File.mkdir_p!(@tmp_dir)
|
||||
end)
|
||||
|
||||
:ok
|
||||
end
|
||||
|
||||
describe "list_dir_file/1" do
|
||||
test "lists files in a valid zip" do
|
||||
{:ok, files} = SafeZip.list_dir_file(Path.join(@fixtures_dir, "emojis.zip"))
|
||||
assert is_list(files)
|
||||
assert length(files) > 0
|
||||
end
|
||||
|
||||
test "returns an empty list for empty zip" do
|
||||
{:ok, files} = SafeZip.list_dir_file(Path.join(@fixtures_dir, "empty.zip"))
|
||||
assert files == []
|
||||
end
|
||||
|
||||
test "returns error for non-existent file" do
|
||||
assert {:error, _} = SafeZip.list_dir_file(Path.join(@fixtures_dir, "nonexistent.zip"))
|
||||
end
|
||||
|
||||
test "only lists regular files, not directories" do
|
||||
# Create a zip with both files and directories
|
||||
zip_path = create_zip_with_directory()
|
||||
|
||||
# List files with SafeZip
|
||||
{:ok, files} = SafeZip.list_dir_file(zip_path)
|
||||
|
||||
# Verify only regular files are listed, not directories
|
||||
assert "file_in_dir/test_file.txt" in files
|
||||
assert "root_file.txt" in files
|
||||
|
||||
# Directory entries should not be included in the list
|
||||
refute "file_in_dir/" in files
|
||||
end
|
||||
end
|
||||
|
||||
describe "contains_all_data?/2" do
|
||||
test "returns true when all files are in the archive" do
|
||||
# For this test, we'll create our own zip file with known content
|
||||
# to ensure we can test the contains_all_data? function properly
|
||||
zip_path = create_zip_with_directory()
|
||||
archive_data = File.read!(zip_path)
|
||||
|
||||
# Check if the archive contains the root file
|
||||
# Note: The function expects charlists (Erlang strings) in the MapSet
|
||||
assert SafeZip.contains_all_data?(archive_data, MapSet.new([~c"root_file.txt"]))
|
||||
end
|
||||
|
||||
test "returns false when files are missing" do
|
||||
archive_path = Path.join(@fixtures_dir, "emojis.zip")
|
||||
archive_data = File.read!(archive_path)
|
||||
|
||||
# Create a MapSet with non-existent files
|
||||
fset = MapSet.new([~c"nonexistent.txt"])
|
||||
|
||||
refute SafeZip.contains_all_data?(archive_data, fset)
|
||||
end
|
||||
|
||||
test "returns false for invalid archive data" do
|
||||
refute SafeZip.contains_all_data?("invalid data", MapSet.new([~c"file.txt"]))
|
||||
end
|
||||
|
||||
test "only checks for regular files, not directories" do
|
||||
# Create a zip with both files and directories
|
||||
zip_path = create_zip_with_directory()
|
||||
archive_data = File.read!(zip_path)
|
||||
|
||||
# Check if the archive contains a directory (should return false)
|
||||
refute SafeZip.contains_all_data?(archive_data, MapSet.new([~c"file_in_dir/"]))
|
||||
|
||||
# For this test, we'll manually check if the file exists in the archive
|
||||
# by extracting it and verifying it exists
|
||||
extract_dir = Path.join(@tmp_dir, "extract_check")
|
||||
File.mkdir_p!(extract_dir)
|
||||
{:ok, files} = SafeZip.unzip_file(zip_path, extract_dir)
|
||||
|
||||
# Verify the root file was extracted
|
||||
assert Enum.any?(files, fn file ->
|
||||
Path.basename(file) == "root_file.txt"
|
||||
end)
|
||||
|
||||
# Verify the file exists on disk
|
||||
assert File.exists?(Path.join(extract_dir, "root_file.txt"))
|
||||
end
|
||||
end
|
||||
|
||||
describe "zip/4" do
|
||||
test "creates a zip file on disk" do
|
||||
# Create a test file
|
||||
test_file_path = Path.join(@tmp_dir, "test_file.txt")
|
||||
File.write!(test_file_path, "test content")
|
||||
|
||||
# Create a zip file
|
||||
zip_path = Path.join(@tmp_dir, "test.zip")
|
||||
assert {:ok, ^zip_path} = SafeZip.zip(zip_path, ["test_file.txt"], @tmp_dir, false)
|
||||
|
||||
# Verify the zip file exists
|
||||
assert File.exists?(zip_path)
|
||||
end
|
||||
|
||||
test "creates a zip file in memory" do
|
||||
# Create a test file
|
||||
test_file_path = Path.join(@tmp_dir, "test_file.txt")
|
||||
File.write!(test_file_path, "test content")
|
||||
|
||||
# Create a zip file in memory
|
||||
zip_name = Path.join(@tmp_dir, "test.zip")
|
||||
|
||||
assert {:ok, {^zip_name, zip_data}} =
|
||||
SafeZip.zip(zip_name, ["test_file.txt"], @tmp_dir, true)
|
||||
|
||||
# Verify the zip data is binary
|
||||
assert is_binary(zip_data)
|
||||
end
|
||||
|
||||
test "returns error for unsafe paths" do
|
||||
# Try to zip a file with path traversal
|
||||
assert {:error, _} =
|
||||
SafeZip.zip(
|
||||
Path.join(@tmp_dir, "test.zip"),
|
||||
["../fixtures/test.txt"],
|
||||
@tmp_dir,
|
||||
false
|
||||
)
|
||||
end
|
||||
|
||||
test "can create zip with directories" do
|
||||
# Create a directory structure
|
||||
dir_path = Path.join(@tmp_dir, "test_dir")
|
||||
File.mkdir_p!(dir_path)
|
||||
|
||||
file_in_dir_path = Path.join(dir_path, "file_in_dir.txt")
|
||||
File.write!(file_in_dir_path, "file in directory")
|
||||
|
||||
# Create a zip file
|
||||
zip_path = Path.join(@tmp_dir, "dir_test.zip")
|
||||
|
||||
assert {:ok, ^zip_path} =
|
||||
SafeZip.zip(
|
||||
zip_path,
|
||||
["test_dir/file_in_dir.txt"],
|
||||
@tmp_dir,
|
||||
false
|
||||
)
|
||||
|
||||
# Verify the zip file exists
|
||||
assert File.exists?(zip_path)
|
||||
|
||||
# Extract and verify the directory structure is preserved
|
||||
extract_dir = Path.join(@tmp_dir, "extract")
|
||||
{:ok, files} = SafeZip.unzip_file(zip_path, extract_dir)
|
||||
|
||||
# Check if the file path is in the list, accounting for possible full paths
|
||||
assert Enum.any?(files, fn file ->
|
||||
String.ends_with?(file, "file_in_dir.txt")
|
||||
end)
|
||||
|
||||
# Verify the file exists in the expected location
|
||||
assert File.exists?(Path.join([extract_dir, "test_dir", "file_in_dir.txt"]))
|
||||
end
|
||||
end
|
||||
|
||||
describe "unzip_file/3" do
|
||||
@tag :skip
|
||||
test "extracts files from a zip archive" do
|
||||
archive_path = Path.join(@fixtures_dir, "emojis.zip")
|
||||
|
||||
# Extract the archive
|
||||
assert {:ok, files} = SafeZip.unzip_file(archive_path, @tmp_dir)
|
||||
|
||||
# Verify files were extracted
|
||||
assert is_list(files)
|
||||
assert length(files) > 0
|
||||
|
||||
# Verify at least one file exists
|
||||
first_file = List.first(files)
|
||||
|
||||
# Simply check that the file exists in the tmp directory
|
||||
assert File.exists?(Path.join(@tmp_dir, Path.basename(first_file)))
|
||||
end
|
||||
|
||||
test "extracts specific files from a zip archive" do
|
||||
archive_path = Path.join(@fixtures_dir, "emojis.zip")
|
||||
|
||||
# Get list of files in the archive
|
||||
{:ok, all_files} = SafeZip.list_dir_file(archive_path)
|
||||
file_to_extract = List.first(all_files)
|
||||
|
||||
# Extract only one file
|
||||
assert {:ok, [extracted_file]} =
|
||||
SafeZip.unzip_file(archive_path, @tmp_dir, [file_to_extract])
|
||||
|
||||
# Verify only the specified file was extracted
|
||||
assert Path.basename(extracted_file) == Path.basename(file_to_extract)
|
||||
|
||||
# Check that the file exists in the tmp directory
|
||||
assert File.exists?(Path.join(@tmp_dir, Path.basename(file_to_extract)))
|
||||
end
|
||||
|
||||
test "returns error for invalid zip file" do
|
||||
invalid_path = Path.join(@tmp_dir, "invalid.zip")
|
||||
File.write!(invalid_path, "not a zip file")
|
||||
|
||||
assert {:error, _} = SafeZip.unzip_file(invalid_path, @tmp_dir)
|
||||
end
|
||||
|
||||
test "creates directories when extracting files in subdirectories" do
|
||||
# Create a zip with files in subdirectories
|
||||
zip_path = create_zip_with_directory()
|
||||
|
||||
# Extract the archive
|
||||
assert {:ok, files} = SafeZip.unzip_file(zip_path, @tmp_dir)
|
||||
|
||||
# Verify files were extracted - handle both relative and absolute paths
|
||||
assert Enum.any?(files, fn file ->
|
||||
Path.basename(file) == "test_file.txt" &&
|
||||
String.contains?(file, "file_in_dir")
|
||||
end)
|
||||
|
||||
assert Enum.any?(files, fn file ->
|
||||
Path.basename(file) == "root_file.txt"
|
||||
end)
|
||||
|
||||
# Verify directory was created
|
||||
dir_path = Path.join(@tmp_dir, "file_in_dir")
|
||||
assert File.exists?(dir_path)
|
||||
assert File.dir?(dir_path)
|
||||
|
||||
# Verify file in directory was extracted
|
||||
file_path = Path.join(dir_path, "test_file.txt")
|
||||
assert File.exists?(file_path)
|
||||
end
|
||||
end
|
||||
|
||||
describe "unzip_data/3" do
|
||||
@tag :skip
|
||||
test "extracts files from zip data" do
|
||||
archive_path = Path.join(@fixtures_dir, "emojis.zip")
|
||||
archive_data = File.read!(archive_path)
|
||||
|
||||
# Extract the archive from data
|
||||
assert {:ok, files} = SafeZip.unzip_data(archive_data, @tmp_dir)
|
||||
|
||||
# Verify files were extracted
|
||||
assert is_list(files)
|
||||
assert length(files) > 0
|
||||
|
||||
# Verify at least one file exists
|
||||
first_file = List.first(files)
|
||||
|
||||
# Simply check that the file exists in the tmp directory
|
||||
assert File.exists?(Path.join(@tmp_dir, Path.basename(first_file)))
|
||||
end
|
||||
|
||||
@tag :skip
|
||||
test "extracts specific files from zip data" do
|
||||
archive_path = Path.join(@fixtures_dir, "emojis.zip")
|
||||
archive_data = File.read!(archive_path)
|
||||
|
||||
# Get list of files in the archive
|
||||
{:ok, all_files} = SafeZip.list_dir_file(archive_path)
|
||||
file_to_extract = List.first(all_files)
|
||||
|
||||
# Extract only one file
|
||||
assert {:ok, extracted_files} =
|
||||
SafeZip.unzip_data(archive_data, @tmp_dir, [file_to_extract])
|
||||
|
||||
# Verify only the specified file was extracted
|
||||
assert Enum.any?(extracted_files, fn path ->
|
||||
Path.basename(path) == Path.basename(file_to_extract)
|
||||
end)
|
||||
|
||||
# Simply check that the file exists in the tmp directory
|
||||
assert File.exists?(Path.join(@tmp_dir, Path.basename(file_to_extract)))
|
||||
end
|
||||
|
||||
test "returns error for invalid zip data" do
|
||||
assert {:error, _} = SafeZip.unzip_data("not a zip file", @tmp_dir)
|
||||
end
|
||||
|
||||
test "creates directories when extracting files in subdirectories from data" do
|
||||
# Create a zip with files in subdirectories
|
||||
zip_path = create_zip_with_directory()
|
||||
archive_data = File.read!(zip_path)
|
||||
|
||||
# Extract the archive from data
|
||||
assert {:ok, files} = SafeZip.unzip_data(archive_data, @tmp_dir)
|
||||
|
||||
# Verify files were extracted - handle both relative and absolute paths
|
||||
assert Enum.any?(files, fn file ->
|
||||
Path.basename(file) == "test_file.txt" &&
|
||||
String.contains?(file, "file_in_dir")
|
||||
end)
|
||||
|
||||
assert Enum.any?(files, fn file ->
|
||||
Path.basename(file) == "root_file.txt"
|
||||
end)
|
||||
|
||||
# Verify directory was created
|
||||
dir_path = Path.join(@tmp_dir, "file_in_dir")
|
||||
assert File.exists?(dir_path)
|
||||
assert File.dir?(dir_path)
|
||||
|
||||
# Verify file in directory was extracted
|
||||
file_path = Path.join(dir_path, "test_file.txt")
|
||||
assert File.exists?(file_path)
|
||||
end
|
||||
end
|
||||
|
||||
# Security tests
|
||||
describe "security checks" do
|
||||
test "prevents path traversal in zip extraction" do
|
||||
# Create a malicious zip file with path traversal
|
||||
malicious_zip_path = create_malicious_zip_with_path_traversal()
|
||||
|
||||
# Try to extract it with SafeZip
|
||||
assert {:error, _} = SafeZip.unzip_file(malicious_zip_path, @tmp_dir)
|
||||
|
||||
# Verify the file was not extracted outside the target directory
|
||||
refute File.exists?(Path.join(Path.dirname(@tmp_dir), "traversal_attempt.txt"))
|
||||
end
|
||||
|
||||
test "prevents directory traversal in zip listing" do
|
||||
# Create a malicious zip file with path traversal
|
||||
malicious_zip_path = create_malicious_zip_with_path_traversal()
|
||||
|
||||
# Try to list files with SafeZip
|
||||
assert {:error, _} = SafeZip.list_dir_file(malicious_zip_path)
|
||||
end
|
||||
|
||||
test "prevents path traversal in zip data extraction" do
|
||||
# Create a malicious zip file with path traversal
|
||||
malicious_zip_path = create_malicious_zip_with_path_traversal()
|
||||
malicious_data = File.read!(malicious_zip_path)
|
||||
|
||||
# Try to extract it with SafeZip
|
||||
assert {:error, _} = SafeZip.unzip_data(malicious_data, @tmp_dir)
|
||||
|
||||
# Verify the file was not extracted outside the target directory
|
||||
refute File.exists?(Path.join(Path.dirname(@tmp_dir), "traversal_attempt.txt"))
|
||||
end
|
||||
|
||||
test "handles zip bomb attempts" do
|
||||
# Create a zip bomb (a zip with many files or large files)
|
||||
zip_bomb_path = create_zip_bomb()
|
||||
|
||||
# The SafeZip module should handle this gracefully
|
||||
# Either by successfully extracting it (if it's not too large)
|
||||
# or by returning an error (if it detects a potential zip bomb)
|
||||
result = SafeZip.unzip_file(zip_bomb_path, @tmp_dir)
|
||||
|
||||
case result do
|
||||
{:ok, _} ->
|
||||
# If it successfully extracts, make sure it didn't fill up the disk
|
||||
# This is a simple check to ensure the extraction was controlled
|
||||
assert File.exists?(@tmp_dir)
|
||||
|
||||
{:error, _} ->
|
||||
# If it returns an error, that's also acceptable
|
||||
# The important thing is that it doesn't crash or hang
|
||||
assert true
|
||||
end
|
||||
end
|
||||
|
||||
test "handles deeply nested directory structures" do
|
||||
# Create a zip with deeply nested directories
|
||||
deep_nest_path = create_deeply_nested_zip()
|
||||
|
||||
# The SafeZip module should handle this gracefully
|
||||
result = SafeZip.unzip_file(deep_nest_path, @tmp_dir)
|
||||
|
||||
case result do
|
||||
{:ok, files} ->
|
||||
# If it successfully extracts, verify the files were extracted
|
||||
assert is_list(files)
|
||||
assert length(files) > 0
|
||||
|
||||
{:error, _} ->
|
||||
# If it returns an error, that's also acceptable
|
||||
# The important thing is that it doesn't crash or hang
|
||||
assert true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Helper functions to create test fixtures
|
||||
|
||||
# Creates a zip file with a path traversal attempt
|
||||
defp create_malicious_zip_with_path_traversal do
|
||||
malicious_zip_path = Path.join(@tmp_dir, "path_traversal.zip")
|
||||
|
||||
# Create a file to include in the zip
|
||||
test_file_path = Path.join(@tmp_dir, "test_file.txt")
|
||||
File.write!(test_file_path, "malicious content")
|
||||
|
||||
# Use Erlang's zip module directly to create a zip with path traversal
|
||||
{:ok, charlist_path} =
|
||||
:zip.create(
|
||||
String.to_charlist(malicious_zip_path),
|
||||
[{String.to_charlist("../traversal_attempt.txt"), File.read!(test_file_path)}]
|
||||
)
|
||||
|
||||
to_string(charlist_path)
|
||||
end
|
||||
|
||||
# Creates a zip file with directory entries
|
||||
defp create_zip_with_directory do
|
||||
zip_path = Path.join(@tmp_dir, "with_directory.zip")
|
||||
|
||||
# Create files to include in the zip
|
||||
root_file_path = Path.join(@tmp_dir, "root_file.txt")
|
||||
File.write!(root_file_path, "root file content")
|
||||
|
||||
# Create a directory and a file in it
|
||||
dir_path = Path.join(@tmp_dir, "file_in_dir")
|
||||
File.mkdir_p!(dir_path)
|
||||
|
||||
file_in_dir_path = Path.join(dir_path, "test_file.txt")
|
||||
File.write!(file_in_dir_path, "file in directory content")
|
||||
|
||||
# Use Erlang's zip module to create a zip with directory structure
|
||||
{:ok, charlist_path} =
|
||||
:zip.create(
|
||||
String.to_charlist(zip_path),
|
||||
[
|
||||
{String.to_charlist("root_file.txt"), File.read!(root_file_path)},
|
||||
{String.to_charlist("file_in_dir/test_file.txt"), File.read!(file_in_dir_path)}
|
||||
]
|
||||
)
|
||||
|
||||
to_string(charlist_path)
|
||||
end
|
||||
|
||||
# Creates a zip bomb (a zip with many small files)
|
||||
defp create_zip_bomb do
|
||||
zip_path = Path.join(@tmp_dir, "zip_bomb.zip")
|
||||
|
||||
# Create a small file to duplicate many times
|
||||
small_file_path = Path.join(@tmp_dir, "small_file.txt")
|
||||
File.write!(small_file_path, String.duplicate("A", 100))
|
||||
|
||||
# Create a list of many files to include in the zip
|
||||
file_entries =
|
||||
for i <- 1..100 do
|
||||
{String.to_charlist("file_#{i}.txt"), File.read!(small_file_path)}
|
||||
end
|
||||
|
||||
# Use Erlang's zip module to create a zip with many files
|
||||
{:ok, charlist_path} =
|
||||
:zip.create(
|
||||
String.to_charlist(zip_path),
|
||||
file_entries
|
||||
)
|
||||
|
||||
to_string(charlist_path)
|
||||
end
|
||||
|
||||
# Creates a zip with deeply nested directories
|
||||
defp create_deeply_nested_zip do
|
||||
zip_path = Path.join(@tmp_dir, "deep_nest.zip")
|
||||
|
||||
# Create a file to include in the zip
|
||||
file_content = "test content"
|
||||
|
||||
# Create a list of deeply nested files
|
||||
file_entries =
|
||||
for i <- 1..10 do
|
||||
nested_path = Enum.reduce(1..i, "nested", fn j, acc -> "#{acc}/level_#{j}" end)
|
||||
{String.to_charlist("#{nested_path}/file.txt"), file_content}
|
||||
end
|
||||
|
||||
# Use Erlang's zip module to create a zip with deeply nested directories
|
||||
{:ok, charlist_path} =
|
||||
:zip.create(
|
||||
String.to_charlist(zip_path),
|
||||
file_entries
|
||||
)
|
||||
|
||||
to_string(charlist_path)
|
||||
end
|
||||
end
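
A minimal usage sketch (illustrative only, not part of the patch): the SafeZip calls exercised above could back a defensive extraction flow along these lines, assuming only list_dir_file/1 and unzip_file/2 with the signatures the tests rely on. The function name and error atom are made up for illustration.

# Hypothetical caller: list first, then extract; SafeZip.unzip_file/2 itself
# rejects path-traversal entries, as the security tests above assert.
defp install_archive(zip_path, dest_dir) do
  case Pleroma.SafeZip.list_dir_file(zip_path) do
    {:ok, []} ->
      {:error, :empty_archive}

    {:ok, _entries} ->
      Pleroma.SafeZip.unzip_file(zip_path, dest_dir)

    {:error, _} = error ->
      error
  end
end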
|
|
@ -34,6 +34,20 @@ defmodule Pleroma.Upload.Filter.AnalyzeMetadataTest do
|
|||
assert meta.blurhash == "eXJi-E:SwCEm5rCmn$+YWYn+15K#5A$xxCi{SiV]s*W:Efa#s.jE-T"
|
||||
end
|
||||
|
||||
test "it gets dimensions for grayscale images" do
|
||||
upload = %Pleroma.Upload{
|
||||
name: "break_analyze.png",
|
||||
content_type: "image/png",
|
||||
path: Path.absname("test/fixtures/break_analyze.png"),
|
||||
tempfile: Path.absname("test/fixtures/break_analyze.png")
|
||||
}
|
||||
|
||||
{:ok, :filtered, meta} = AnalyzeMetadata.filter(upload)
|
||||
|
||||
assert %{width: 1410, height: 2048} = meta
|
||||
assert is_nil(meta.blurhash)
|
||||
end
|
||||
|
||||
test "adds the dimensions for videos" do
|
||||
upload = %Pleroma.Upload{
|
||||
name: "coolvideo.mp4",
|
||||
|
|
|
@ -185,13 +185,13 @@ defmodule Pleroma.User.BackupTest do
|
|||
%{"@language" => "und"}
|
||||
],
|
||||
"bookmarks" => "bookmarks.json",
|
||||
"followers" => "http://cofe.io/users/cofe/followers",
|
||||
"following" => "http://cofe.io/users/cofe/following",
|
||||
"followers" => "followers.json",
|
||||
"following" => "following.json",
|
||||
"id" => "http://cofe.io/users/cofe",
|
||||
"inbox" => "http://cofe.io/users/cofe/inbox",
|
||||
"likes" => "likes.json",
|
||||
"name" => "Cofe",
|
||||
"outbox" => "http://cofe.io/users/cofe/outbox",
|
||||
"outbox" => "outbox.json",
|
||||
"preferredUsername" => "cofe",
|
||||
"publicKey" => %{
|
||||
"id" => "http://cofe.io/users/cofe#main-key",
|
||||
|
|
|
@ -2919,4 +2919,74 @@ defmodule Pleroma.UserTest do
|
|||
|
||||
assert [%{"verified_at" => ^verified_at}] = user.fields
|
||||
end
|
||||
|
||||
describe "follow_hashtag/2" do
|
||||
test "should follow a hashtag" do
|
||||
user = insert(:user)
|
||||
hashtag = insert(:hashtag)
|
||||
|
||||
assert {:ok, _} = user |> User.follow_hashtag(hashtag)
|
||||
|
||||
user = User.get_cached_by_ap_id(user.ap_id)
|
||||
|
||||
assert user.followed_hashtags |> Enum.count() == 1
|
||||
assert hashtag.name in Enum.map(user.followed_hashtags, fn %{name: name} -> name end)
|
||||
end
|
||||
|
||||
test "should not follow a hashtag twice" do
|
||||
user = insert(:user)
|
||||
hashtag = insert(:hashtag)
|
||||
|
||||
assert {:ok, _} = user |> User.follow_hashtag(hashtag)
|
||||
|
||||
assert {:ok, _} = user |> User.follow_hashtag(hashtag)
|
||||
|
||||
user = User.get_cached_by_ap_id(user.ap_id)
|
||||
|
||||
assert user.followed_hashtags |> Enum.count() == 1
|
||||
assert hashtag.name in Enum.map(user.followed_hashtags, fn %{name: name} -> name end)
|
||||
end
|
||||
|
||||
test "can follow multiple hashtags" do
|
||||
user = insert(:user)
|
||||
hashtag = insert(:hashtag)
|
||||
other_hashtag = insert(:hashtag)
|
||||
|
||||
assert {:ok, _} = user |> User.follow_hashtag(hashtag)
|
||||
assert {:ok, _} = user |> User.follow_hashtag(other_hashtag)
|
||||
|
||||
user = User.get_cached_by_ap_id(user.ap_id)
|
||||
|
||||
assert user.followed_hashtags |> Enum.count() == 2
|
||||
assert hashtag.name in Enum.map(user.followed_hashtags, fn %{name: name} -> name end)
|
||||
assert other_hashtag.name in Enum.map(user.followed_hashtags, fn %{name: name} -> name end)
|
||||
end
|
||||
end
|
||||
|
||||
describe "unfollow_hashtag/2" do
|
||||
test "should unfollow a hashtag" do
|
||||
user = insert(:user)
|
||||
hashtag = insert(:hashtag)
|
||||
|
||||
assert {:ok, _} = user |> User.follow_hashtag(hashtag)
|
||||
assert {:ok, _} = user |> User.unfollow_hashtag(hashtag)
|
||||
|
||||
user = User.get_cached_by_ap_id(user.ap_id)
|
||||
|
||||
assert user.followed_hashtags |> Enum.count() == 0
|
||||
end
|
||||
|
||||
test "should not error when trying to unfollow a hashtag twice" do
|
||||
user = insert(:user)
|
||||
hashtag = insert(:hashtag)
|
||||
|
||||
assert {:ok, _} = user |> User.follow_hashtag(hashtag)
|
||||
assert {:ok, _} = user |> User.unfollow_hashtag(hashtag)
|
||||
assert {:ok, _} = user |> User.unfollow_hashtag(hashtag)
|
||||
|
||||
user = User.get_cached_by_ap_id(user.ap_id)
|
||||
|
||||
assert user.followed_hashtags |> Enum.count() == 0
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -1344,6 +1344,11 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
|
|||
end
|
||||
|
||||
describe "GET /users/:nickname/outbox" do
|
||||
setup do
|
||||
Mox.stub_with(Pleroma.StaticStubbedConfigMock, Pleroma.Config)
|
||||
:ok
|
||||
end
|
||||
|
||||
test "it paginates correctly", %{conn: conn} do
|
||||
user = insert(:user)
|
||||
conn = assign(conn, :user, user)
|
||||
|
@ -1432,6 +1437,22 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
|
|||
assert %{"orderedItems" => []} = resp
|
||||
end
|
||||
|
||||
test "it does not return a local note activity when C2S API is disabled", %{conn: conn} do
|
||||
clear_config([:activitypub, :client_api_enabled], false)
|
||||
user = insert(:user)
|
||||
reader = insert(:user)
|
||||
{:ok, _note_activity} = CommonAPI.post(user, %{status: "mew mew", visibility: "local"})
|
||||
|
||||
resp =
|
||||
conn
|
||||
|> assign(:user, reader)
|
||||
|> put_req_header("accept", "application/activity+json")
|
||||
|> get("/users/#{user.nickname}/outbox?page=true")
|
||||
|> json_response(200)
|
||||
|
||||
assert %{"orderedItems" => []} = resp
|
||||
end
|
||||
|
||||
test "it returns a note activity in a collection", %{conn: conn} do
|
||||
note_activity = insert(:note_activity)
|
||||
note_object = Object.normalize(note_activity, fetch: false)
|
||||
|
@ -1483,6 +1504,35 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
|
|||
assert [answer_outbox] = outbox_get["orderedItems"]
|
||||
assert answer_outbox["id"] == activity.data["id"]
|
||||
end
|
||||
|
||||
test "it works with authorized fetch forced when authenticated" do
|
||||
clear_config([:activitypub, :authorized_fetch_mode], true)
|
||||
|
||||
user = insert(:user)
|
||||
outbox_endpoint = user.ap_id <> "/outbox"
|
||||
|
||||
conn =
|
||||
build_conn()
|
||||
|> assign(:user, user)
|
||||
|> put_req_header("accept", "application/activity+json")
|
||||
|> get(outbox_endpoint)
|
||||
|
||||
assert json_response(conn, 200)
|
||||
end
|
||||
|
||||
test "it fails with authorized fetch forced when unauthenticated", %{conn: conn} do
|
||||
clear_config([:activitypub, :authorized_fetch_mode], true)
|
||||
|
||||
user = insert(:user)
|
||||
outbox_endpoint = user.ap_id <> "/outbox"
|
||||
|
||||
conn =
|
||||
conn
|
||||
|> put_req_header("accept", "application/activity+json")
|
||||
|> get(outbox_endpoint)
|
||||
|
||||
assert response(conn, 401)
|
||||
end
|
||||
end
|
||||
|
||||
describe "POST /users/:nickname/outbox (C2S)" do
|
||||
|
@ -2153,6 +2203,30 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
|
|||
|> post("/api/ap/upload_media", %{"file" => image, "description" => desc})
|
||||
|> json_response(403)
|
||||
end
|
||||
|
||||
test "they don't work when C2S API is disabled", %{conn: conn} do
|
||||
clear_config([:activitypub, :client_api_enabled], false)
|
||||
|
||||
user = insert(:user)
|
||||
|
||||
assert conn
|
||||
|> assign(:user, user)
|
||||
|> get("/api/ap/whoami")
|
||||
|> response(403)
|
||||
|
||||
desc = "Description of the image"
|
||||
|
||||
image = %Plug.Upload{
|
||||
content_type: "image/jpeg",
|
||||
path: Path.absname("test/fixtures/image.jpg"),
|
||||
filename: "an_image.jpg"
|
||||
}
|
||||
|
||||
assert conn
|
||||
|> assign(:user, user)
|
||||
|> post("/api/ap/upload_media", %{"file" => image, "description" => desc})
|
||||
|> response(403)
|
||||
end
|
||||
end
|
||||
|
||||
test "pinned collection", %{conn: conn} do
|
||||
|
|
|
@ -867,6 +867,33 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
|
|||
end
|
||||
end
|
||||
|
||||
describe "fetch activities for followed hashtags" do
|
||||
test "it should return public activities that reference a given hashtag" do
|
||||
hashtag = insert(:hashtag, name: "tenshi")
|
||||
user = insert(:user)
|
||||
other_user = insert(:user)
|
||||
|
||||
{:ok, normally_visible} =
|
||||
CommonAPI.post(other_user, %{status: "hello :)", visibility: "public"})
|
||||
|
||||
{:ok, public} = CommonAPI.post(user, %{status: "maji #tenshi", visibility: "public"})
|
||||
{:ok, _unrelated} = CommonAPI.post(user, %{status: "dai #tensh", visibility: "public"})
|
||||
{:ok, unlisted} = CommonAPI.post(user, %{status: "maji #tenshi", visibility: "unlisted"})
|
||||
{:ok, _private} = CommonAPI.post(user, %{status: "maji #tenshi", visibility: "private"})
|
||||
|
||||
activities =
|
||||
ActivityPub.fetch_activities([other_user.follower_address], %{
|
||||
followed_hashtags: [hashtag.id]
|
||||
})
|
||||
|
||||
assert length(activities) == 3
|
||||
normal_id = normally_visible.id
|
||||
public_id = public.id
|
||||
unlisted_id = unlisted.id
|
||||
assert [%{id: ^normal_id}, %{id: ^public_id}, %{id: ^unlisted_id}] = activities
|
||||
end
|
||||
end
|
||||
|
||||
describe "fetch activities in context" do
|
||||
test "retrieves activities that have a given context" do
|
||||
{:ok, activity} = ActivityBuilder.insert(%{"type" => "Create", "context" => "2hu"})
|
||||
|
|
|
@ -1,117 +0,0 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Web.ActivityPub.MRF.FODirectReplyTest do
|
||||
use Pleroma.DataCase
|
||||
import Pleroma.Factory
|
||||
|
||||
require Pleroma.Constants
|
||||
|
||||
alias Pleroma.Object
|
||||
alias Pleroma.Web.ActivityPub.MRF.FODirectReply
|
||||
alias Pleroma.Web.CommonAPI
|
||||
|
||||
test "replying to followers-only/private is changed to direct" do
|
||||
batman = insert(:user, nickname: "batman")
|
||||
robin = insert(:user, nickname: "robin")
|
||||
|
||||
{:ok, post} =
|
||||
CommonAPI.post(batman, %{
|
||||
status: "Has anyone seen Selina Kyle's latest selfies?",
|
||||
visibility: "private"
|
||||
})
|
||||
|
||||
reply = %{
|
||||
"type" => "Create",
|
||||
"actor" => robin.ap_id,
|
||||
"to" => [batman.ap_id, robin.follower_address],
|
||||
"cc" => [],
|
||||
"object" => %{
|
||||
"type" => "Note",
|
||||
"actor" => robin.ap_id,
|
||||
"content" => "@batman 🤤 ❤️ 🐈⬛",
|
||||
"to" => [batman.ap_id, robin.follower_address],
|
||||
"cc" => [],
|
||||
"inReplyTo" => Object.normalize(post).data["id"]
|
||||
}
|
||||
}
|
||||
|
||||
expected_to = [batman.ap_id]
|
||||
expected_cc = []
|
||||
|
||||
assert {:ok, filtered} = FODirectReply.filter(reply)
|
||||
|
||||
assert expected_to == filtered["to"]
|
||||
assert expected_cc == filtered["cc"]
|
||||
assert expected_to == filtered["object"]["to"]
|
||||
assert expected_cc == filtered["object"]["cc"]
|
||||
end
|
||||
|
||||
test "replies to unlisted posts are unmodified" do
|
||||
batman = insert(:user, nickname: "batman")
|
||||
robin = insert(:user, nickname: "robin")
|
||||
|
||||
{:ok, post} =
|
||||
CommonAPI.post(batman, %{
|
||||
status: "Has anyone seen Selina Kyle's latest selfies?",
|
||||
visibility: "unlisted"
|
||||
})
|
||||
|
||||
reply = %{
|
||||
"type" => "Create",
|
||||
"actor" => robin.ap_id,
|
||||
"to" => [batman.ap_id, robin.follower_address],
|
||||
"cc" => [],
|
||||
"object" => %{
|
||||
"type" => "Note",
|
||||
"actor" => robin.ap_id,
|
||||
"content" => "@batman 🤤 ❤️ 🐈<200d>⬛",
|
||||
"to" => [batman.ap_id, robin.follower_address],
|
||||
"cc" => [],
|
||||
"inReplyTo" => Object.normalize(post).data["id"]
|
||||
}
|
||||
}
|
||||
|
||||
assert {:ok, filtered} = FODirectReply.filter(reply)
|
||||
|
||||
assert match?(^filtered, reply)
|
||||
end
|
||||
|
||||
test "replies to public posts are unmodified" do
|
||||
batman = insert(:user, nickname: "batman")
|
||||
robin = insert(:user, nickname: "robin")
|
||||
|
||||
{:ok, post} =
|
||||
CommonAPI.post(batman, %{status: "Has anyone seen Selina Kyle's latest selfies?"})
|
||||
|
||||
reply = %{
|
||||
"type" => "Create",
|
||||
"actor" => robin.ap_id,
|
||||
"to" => [batman.ap_id, robin.follower_address],
|
||||
"cc" => [],
|
||||
"object" => %{
|
||||
"type" => "Note",
|
||||
"actor" => robin.ap_id,
|
||||
"content" => "@batman 🤤 ❤️ 🐈<200d>⬛",
|
||||
"to" => [batman.ap_id, robin.follower_address],
|
||||
"cc" => [],
|
||||
"inReplyTo" => Object.normalize(post).data["id"]
|
||||
}
|
||||
}
|
||||
|
||||
assert {:ok, filtered} = FODirectReply.filter(reply)
|
||||
|
||||
assert match?(^filtered, reply)
|
||||
end
|
||||
|
||||
test "non-reply posts are unmodified" do
|
||||
batman = insert(:user, nickname: "batman")
|
||||
|
||||
{:ok, post} = CommonAPI.post(batman, %{status: "To the Batmobile!"})
|
||||
|
||||
assert {:ok, filtered} = FODirectReply.filter(post)
|
||||
|
||||
assert match?(^filtered, post)
|
||||
end
|
||||
end
|
|
@ -1,140 +0,0 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Web.ActivityPub.MRF.QuietReplyTest do
|
||||
use Pleroma.DataCase
|
||||
import Pleroma.Factory
|
||||
|
||||
require Pleroma.Constants
|
||||
|
||||
alias Pleroma.Object
|
||||
alias Pleroma.Web.ActivityPub.MRF.QuietReply
|
||||
alias Pleroma.Web.CommonAPI
|
||||
|
||||
test "replying to public post is forced to be quiet" do
|
||||
batman = insert(:user, nickname: "batman")
|
||||
robin = insert(:user, nickname: "robin")
|
||||
|
||||
{:ok, post} = CommonAPI.post(batman, %{status: "To the Batmobile!"})
|
||||
|
||||
reply = %{
|
||||
"type" => "Create",
|
||||
"actor" => robin.ap_id,
|
||||
"to" => [
|
||||
batman.ap_id,
|
||||
Pleroma.Constants.as_public()
|
||||
],
|
||||
"cc" => [robin.follower_address],
|
||||
"object" => %{
|
||||
"type" => "Note",
|
||||
"actor" => robin.ap_id,
|
||||
"content" => "@batman Wait up, I forgot my spandex!",
|
||||
"to" => [
|
||||
batman.ap_id,
|
||||
Pleroma.Constants.as_public()
|
||||
],
|
||||
"cc" => [robin.follower_address],
|
||||
"inReplyTo" => Object.normalize(post).data["id"]
|
||||
}
|
||||
}
|
||||
|
||||
expected_to = [batman.ap_id, robin.follower_address]
|
||||
expected_cc = [Pleroma.Constants.as_public()]
|
||||
|
||||
assert {:ok, filtered} = QuietReply.filter(reply)
|
||||
|
||||
assert expected_to == filtered["to"]
|
||||
assert expected_cc == filtered["cc"]
|
||||
assert expected_to == filtered["object"]["to"]
|
||||
assert expected_cc == filtered["object"]["cc"]
|
||||
end
|
||||
|
||||
test "replying to unlisted post is unmodified" do
|
||||
batman = insert(:user, nickname: "batman")
|
||||
robin = insert(:user, nickname: "robin")
|
||||
|
||||
{:ok, post} = CommonAPI.post(batman, %{status: "To the Batmobile!", visibility: "private"})
|
||||
|
||||
reply = %{
|
||||
"type" => "Create",
|
||||
"actor" => robin.ap_id,
|
||||
"to" => [batman.ap_id],
|
||||
"cc" => [],
|
||||
"object" => %{
|
||||
"type" => "Note",
|
||||
"actor" => robin.ap_id,
|
||||
"content" => "@batman Wait up, I forgot my spandex!",
|
||||
"to" => [batman.ap_id],
|
||||
"cc" => [],
|
||||
"inReplyTo" => Object.normalize(post).data["id"]
|
||||
}
|
||||
}
|
||||
|
||||
assert {:ok, filtered} = QuietReply.filter(reply)
|
||||
|
||||
assert match?(^filtered, reply)
|
||||
end
|
||||
|
||||
test "replying direct is unmodified" do
|
||||
batman = insert(:user, nickname: "batman")
|
||||
robin = insert(:user, nickname: "robin")
|
||||
|
||||
{:ok, post} = CommonAPI.post(batman, %{status: "To the Batmobile!"})
|
||||
|
||||
reply = %{
|
||||
"type" => "Create",
|
||||
"actor" => robin.ap_id,
|
||||
"to" => [batman.ap_id],
|
||||
"cc" => [],
|
||||
"object" => %{
|
||||
"type" => "Note",
|
||||
"actor" => robin.ap_id,
|
||||
"content" => "@batman Wait up, I forgot my spandex!",
|
||||
"to" => [batman.ap_id],
|
||||
"cc" => [],
|
||||
"inReplyTo" => Object.normalize(post).data["id"]
|
||||
}
|
||||
}
|
||||
|
||||
assert {:ok, filtered} = QuietReply.filter(reply)
|
||||
|
||||
assert match?(^filtered, reply)
|
||||
end
|
||||
|
||||
test "replying followers-only is unmodified" do
|
||||
batman = insert(:user, nickname: "batman")
|
||||
robin = insert(:user, nickname: "robin")
|
||||
|
||||
{:ok, post} = CommonAPI.post(batman, %{status: "To the Batmobile!"})
|
||||
|
||||
reply = %{
|
||||
"type" => "Create",
|
||||
"actor" => robin.ap_id,
|
||||
"to" => [batman.ap_id, robin.follower_address],
|
||||
"cc" => [],
|
||||
"object" => %{
|
||||
"type" => "Note",
|
||||
"actor" => robin.ap_id,
|
||||
"content" => "@batman Wait up, I forgot my spandex!",
|
||||
"to" => [batman.ap_id, robin.follower_address],
|
||||
"cc" => [],
|
||||
"inReplyTo" => Object.normalize(post).data["id"]
|
||||
}
|
||||
}
|
||||
|
||||
assert {:ok, filtered} = QuietReply.filter(reply)
|
||||
|
||||
assert match?(^filtered, reply)
|
||||
end
|
||||
|
||||
test "non-reply posts are unmodified" do
|
||||
batman = insert(:user, nickname: "batman")
|
||||
|
||||
{:ok, post} = CommonAPI.post(batman, %{status: "To the Batmobile!"})
|
||||
|
||||
assert {:ok, filtered} = QuietReply.filter(post)
|
||||
|
||||
assert match?(^filtered, post)
|
||||
end
|
||||
end
|
|
@ -128,6 +128,17 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.ArticleNotePageValidatorTest
|
|||
%{valid?: true} = ArticleNotePageValidator.cast_and_validate(note)
|
||||
end
|
||||
|
||||
test "a Note with validated likes collection validates" do
|
||||
insert(:user, ap_id: "https://pol.social/users/mkljczk")
|
||||
|
||||
%{"object" => note} =
|
||||
"test/fixtures/mastodon-update-with-likes.json"
|
||||
|> File.read!()
|
||||
|> Jason.decode!()
|
||||
|
||||
%{valid?: true} = ArticleNotePageValidator.cast_and_validate(note)
|
||||
end
|
||||
|
||||
test "Fedibird quote post" do
|
||||
insert(:user, ap_id: "https://fedibird.com/users/noellabo")
|
||||
|
||||
|
@ -176,4 +187,71 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.ArticleNotePageValidatorTest
|
|||
name: "RE: https://server.example/objects/123"
|
||||
}
|
||||
end
|
||||
|
||||
describe "Note language" do
|
||||
test "it detects language from JSON-LD context" do
|
||||
user = insert(:user)
|
||||
|
||||
note_activity = %{
|
||||
"@context" => ["https://www.w3.org/ns/activitystreams", %{"@language" => "pl"}],
|
||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
||||
"cc" => [],
|
||||
"type" => "Create",
|
||||
"object" => %{
|
||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
||||
"cc" => [],
|
||||
"id" => Utils.generate_object_id(),
|
||||
"type" => "Note",
|
||||
"content" => "Szczęść Boże",
|
||||
"attributedTo" => user.ap_id
|
||||
},
|
||||
"actor" => user.ap_id
|
||||
}
|
||||
|
||||
{:ok, _create_activity, meta} = ObjectValidator.validate(note_activity, [])
|
||||
|
||||
assert meta[:object_data]["language"] == "pl"
|
||||
end
|
||||
|
||||
test "it detects language from contentMap" do
|
||||
user = insert(:user)
|
||||
|
||||
note = %{
|
||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
||||
"cc" => [],
|
||||
"id" => Utils.generate_object_id(),
|
||||
"type" => "Note",
|
||||
"content" => "Szczęść Boże",
|
||||
"contentMap" => %{
|
||||
"de" => "Gott segne",
|
||||
"pl" => "Szczęść Boże"
|
||||
},
|
||||
"attributedTo" => user.ap_id
|
||||
}
|
||||
|
||||
{:ok, object} = ArticleNotePageValidator.cast_and_apply(note)
|
||||
|
||||
assert object.language == "pl"
|
||||
end
|
||||
|
||||
test "it adds contentMap if language is specified" do
|
||||
user = insert(:user)
|
||||
|
||||
note = %{
|
||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
||||
"cc" => [],
|
||||
"id" => Utils.generate_object_id(),
|
||||
"type" => "Note",
|
||||
"content" => "тест",
|
||||
"language" => "uk",
|
||||
"attributedTo" => user.ap_id
|
||||
}
|
||||
|
||||
{:ok, object} = ArticleNotePageValidator.cast_and_apply(note)
|
||||
|
||||
assert object.contentMap == %{
|
||||
"uk" => "тест"
|
||||
}
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -13,6 +13,23 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidatorTest do
|
|||
import Pleroma.Factory
|
||||
|
||||
describe "attachments" do
|
||||
test "works with apng" do
|
||||
attachment =
|
||||
%{
|
||||
"mediaType" => "image/apng",
|
||||
"name" => "",
|
||||
"type" => "Document",
|
||||
"url" =>
|
||||
"https://media.misskeyusercontent.com/io/2859c26e-cd43-4550-848b-b6243bc3fe28.apng"
|
||||
}
|
||||
|
||||
assert {:ok, attachment} =
|
||||
AttachmentValidator.cast_and_validate(attachment)
|
||||
|> Ecto.Changeset.apply_action(:insert)
|
||||
|
||||
assert attachment.mediaType == "image/apng"
|
||||
end
|
||||
|
||||
test "fails without url" do
|
||||
attachment = %{
|
||||
"mediaType" => "",
|
||||
|
|
|
@ -219,6 +219,36 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier.NoteHandlingTest do
|
|||
"<p><span class=\"h-card\"><a href=\"http://localtesting.pleroma.lol/users/lain\" class=\"u-url mention\">@<span>lain</span></a></span></p>"
|
||||
end
|
||||
|
||||
test "it only uses contentMap if content is not present" do
|
||||
user = insert(:user)
|
||||
|
||||
message = %{
|
||||
"@context" => "https://www.w3.org/ns/activitystreams",
|
||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
||||
"cc" => [],
|
||||
"type" => "Create",
|
||||
"object" => %{
|
||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
||||
"cc" => [],
|
||||
"id" => Utils.generate_object_id(),
|
||||
"type" => "Note",
|
||||
"content" => "Hi",
|
||||
"contentMap" => %{
|
||||
"de" => "Hallo",
|
||||
"uk" => "Привіт"
|
||||
},
|
||||
"inReplyTo" => nil,
|
||||
"attributedTo" => user.ap_id
|
||||
},
|
||||
"actor" => user.ap_id
|
||||
}
|
||||
|
||||
{:ok, %Activity{data: data, local: false}} = Transmogrifier.handle_incoming(message)
|
||||
object = Object.normalize(data["object"], fetch: false)
|
||||
|
||||
assert object.data["content"] == "Hi"
|
||||
end
|
||||
|
||||
test "it works for incoming notices with a nil contentMap (firefish)" do
|
||||
data =
|
||||
File.read!("test/fixtures/mastodon-post-activity-contentmap.json")
|
||||
|
|
|
@ -156,6 +156,246 @@ defmodule Pleroma.Web.ActivityPub.TransmogrifierTest do
|
|||
# It fetched the quoted post
|
||||
assert Object.normalize("https://misskey.io/notes/8vs6wxufd0")
|
||||
end
|
||||
|
||||
test "doesn't allow remote edits to fake local likes" do
|
||||
# as a spot check for no internal fields getting injected
|
||||
now = DateTime.utc_now()
|
||||
pub_date = DateTime.to_iso8601(Timex.subtract(now, Timex.Duration.from_minutes(3)))
|
||||
edit_date = DateTime.to_iso8601(now)
|
||||
|
||||
local_user = insert(:user)
|
||||
|
||||
create_data = %{
|
||||
"type" => "Create",
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/2619539638/activity",
|
||||
"actor" => "http://mastodon.example.org/users/admin",
|
||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
||||
"cc" => [],
|
||||
"object" => %{
|
||||
"type" => "Note",
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/2619539638",
|
||||
"attributedTo" => "http://mastodon.example.org/users/admin",
|
||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
||||
"cc" => [],
|
||||
"published" => pub_date,
|
||||
"content" => "miaow",
|
||||
"likes" => [local_user.ap_id]
|
||||
}
|
||||
}
|
||||
|
||||
update_data =
|
||||
create_data
|
||||
|> Map.put("type", "Update")
|
||||
|> Map.put("id", create_data["object"]["id"] <> "/update/1")
|
||||
|> put_in(["object", "content"], "miaow :3")
|
||||
|> put_in(["object", "updated"], edit_date)
|
||||
|> put_in(["object", "formerRepresentations"], %{
|
||||
"type" => "OrderedCollection",
|
||||
"totalItems" => 1,
|
||||
"orderedItems" => [create_data["object"]]
|
||||
})
|
||||
|
||||
{:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(create_data)
|
||||
%Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"])
|
||||
assert object.data["content"] == "miaow"
|
||||
assert object.data["likes"] == []
|
||||
assert object.data["like_count"] == 0
|
||||
|
||||
{:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(update_data)
|
||||
%Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"]["id"])
|
||||
assert object.data["content"] == "miaow :3"
|
||||
assert object.data["likes"] == []
|
||||
assert object.data["like_count"] == 0
|
||||
end
|
||||
|
||||
test "strips internal fields from history items in edited notes" do
|
||||
now = DateTime.utc_now()
|
||||
pub_date = DateTime.to_iso8601(Timex.subtract(now, Timex.Duration.from_minutes(3)))
|
||||
edit_date = DateTime.to_iso8601(now)
|
||||
|
||||
local_user = insert(:user)
|
||||
|
||||
create_data = %{
|
||||
"type" => "Create",
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/2619539638/activity",
|
||||
"actor" => "http://mastodon.example.org/users/admin",
|
||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
||||
"cc" => [],
|
||||
"object" => %{
|
||||
"type" => "Note",
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/2619539638",
|
||||
"attributedTo" => "http://mastodon.example.org/users/admin",
|
||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
||||
"cc" => [],
|
||||
"published" => pub_date,
|
||||
"content" => "miaow",
|
||||
"likes" => [],
|
||||
"like_count" => 0
|
||||
}
|
||||
}
|
||||
|
||||
update_data =
|
||||
create_data
|
||||
|> Map.put("type", "Update")
|
||||
|> Map.put("id", create_data["object"]["id"] <> "/update/1")
|
||||
|> put_in(["object", "content"], "miaow :3")
|
||||
|> put_in(["object", "updated"], edit_date)
|
||||
|> put_in(["object", "formerRepresentations"], %{
|
||||
"type" => "OrderedCollection",
|
||||
"totalItems" => 1,
|
||||
"orderedItems" => [
|
||||
Map.merge(create_data["object"], %{
|
||||
"likes" => [local_user.ap_id],
|
||||
"like_count" => 1,
|
||||
"pleroma" => %{"internal_field" => "should_be_stripped"}
|
||||
})
|
||||
]
|
||||
})
|
||||
|
||||
{:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(create_data)
|
||||
%Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"])
|
||||
assert object.data["content"] == "miaow"
|
||||
assert object.data["likes"] == []
|
||||
assert object.data["like_count"] == 0
|
||||
|
||||
{:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(update_data)
|
||||
%Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"]["id"])
|
||||
assert object.data["content"] == "miaow :3"
|
||||
assert object.data["likes"] == []
|
||||
assert object.data["like_count"] == 0
|
||||
|
||||
# Check that internal fields are stripped from history items
|
||||
history_item = List.first(object.data["formerRepresentations"]["orderedItems"])
|
||||
assert history_item["likes"] == []
|
||||
assert history_item["like_count"] == 0
|
||||
refute Map.has_key?(history_item, "pleroma")
|
||||
end
|
||||
|
||||
test "doesn't trip over remote likes in notes" do
|
||||
now = DateTime.utc_now()
|
||||
pub_date = DateTime.to_iso8601(Timex.subtract(now, Timex.Duration.from_minutes(3)))
|
||||
edit_date = DateTime.to_iso8601(now)
|
||||
|
||||
create_data = %{
|
||||
"type" => "Create",
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/3409297097/activity",
|
||||
"actor" => "http://mastodon.example.org/users/admin",
|
||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
||||
"cc" => [],
|
||||
"object" => %{
|
||||
"type" => "Note",
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/3409297097",
|
||||
"attributedTo" => "http://mastodon.example.org/users/admin",
|
||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
||||
"cc" => [],
|
||||
"published" => pub_date,
|
||||
"content" => "miaow",
|
||||
"likes" => %{
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/3409297097/likes",
|
||||
"totalItems" => 0,
|
||||
"type" => "Collection"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
update_data =
|
||||
create_data
|
||||
|> Map.put("type", "Update")
|
||||
|> Map.put("id", create_data["object"]["id"] <> "/update/1")
|
||||
|> put_in(["object", "content"], "miaow :3")
|
||||
|> put_in(["object", "updated"], edit_date)
|
||||
|> put_in(["object", "likes", "totalItems"], 666)
|
||||
|> put_in(["object", "formerRepresentations"], %{
|
||||
"type" => "OrderedCollection",
|
||||
"totalItems" => 1,
|
||||
"orderedItems" => [create_data["object"]]
|
||||
})
|
||||
|
||||
{:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(create_data)
|
||||
%Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"])
|
||||
assert object.data["content"] == "miaow"
|
||||
assert object.data["likes"] == []
|
||||
assert object.data["like_count"] == 0
|
||||
|
||||
{:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(update_data)
|
||||
%Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"]["id"])
|
||||
assert object.data["content"] == "miaow :3"
|
||||
assert object.data["likes"] == []
|
||||
# in the future this should retain remote likes, but for now:
|
||||
assert object.data["like_count"] == 0
|
||||
end
|
||||
|
||||
test "doesn't trip over remote likes in polls" do
|
||||
now = DateTime.utc_now()
|
||||
pub_date = DateTime.to_iso8601(Timex.subtract(now, Timex.Duration.from_minutes(3)))
|
||||
edit_date = DateTime.to_iso8601(now)
|
||||
|
||||
create_data = %{
|
||||
"type" => "Create",
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/2471790073/activity",
|
||||
"actor" => "http://mastodon.example.org/users/admin",
|
||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
||||
"cc" => [],
|
||||
"object" => %{
|
||||
"type" => "Question",
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/2471790073",
|
||||
"attributedTo" => "http://mastodon.example.org/users/admin",
|
||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
||||
"cc" => [],
|
||||
"published" => pub_date,
|
||||
"content" => "vote!",
|
||||
"anyOf" => [
|
||||
%{
|
||||
"type" => "Note",
|
||||
"name" => "a",
|
||||
"replies" => %{
|
||||
"type" => "Collection",
|
||||
"totalItems" => 3
|
||||
}
|
||||
},
|
||||
%{
|
||||
"type" => "Note",
|
||||
"name" => "b",
|
||||
"replies" => %{
|
||||
"type" => "Collection",
|
||||
"totalItems" => 1
|
||||
}
|
||||
}
|
||||
],
|
||||
"likes" => %{
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/2471790073/likes",
|
||||
"totalItems" => 0,
|
||||
"type" => "Collection"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
update_data =
|
||||
create_data
|
||||
|> Map.put("type", "Update")
|
||||
|> Map.put("id", create_data["object"]["id"] <> "/update/1")
|
||||
|> put_in(["object", "content"], "vote now!")
|
||||
|> put_in(["object", "updated"], edit_date)
|
||||
|> put_in(["object", "likes", "totalItems"], 666)
|
||||
|> put_in(["object", "formerRepresentations"], %{
|
||||
"type" => "OrderedCollection",
|
||||
"totalItems" => 1,
|
||||
"orderedItems" => [create_data["object"]]
|
||||
})
|
||||
|
||||
{:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(create_data)
|
||||
%Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"])
|
||||
assert object.data["content"] == "vote!"
|
||||
assert object.data["likes"] == []
|
||||
assert object.data["like_count"] == 0
|
||||
|
||||
{:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(update_data)
|
||||
%Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"]["id"])
|
||||
assert object.data["content"] == "vote now!"
|
||||
assert object.data["likes"] == []
|
||||
# in the future this should retain remote likes, but for now:
|
||||
assert object.data["like_count"] == 0
|
||||
end
|
||||
end
|
||||
|
||||
describe "prepare outgoing" do
|
||||
|
@ -384,6 +624,24 @@ defmodule Pleroma.Web.ActivityPub.TransmogrifierTest do
|
|||
assert modified["object"]["quoteUrl"] == quote_id
|
||||
assert modified["object"]["quoteUri"] == quote_id
|
||||
end
|
||||
|
||||
test "it adds language of the object to its json-ld context" do
|
||||
user = insert(:user)
|
||||
|
||||
{:ok, activity} = CommonAPI.post(user, %{status: "Cześć", language: "pl"})
|
||||
{:ok, modified} = Transmogrifier.prepare_outgoing(activity.object.data)
|
||||
|
||||
assert [_, _, %{"@language" => "pl"}] = modified["@context"]
|
||||
end
|
||||
|
||||
test "it adds language of the object to Create activity json-ld context" do
|
||||
user = insert(:user)
|
||||
|
||||
{:ok, activity} = CommonAPI.post(user, %{status: "Cześć", language: "pl"})
|
||||
{:ok, modified} = Transmogrifier.prepare_outgoing(activity.data)
|
||||
|
||||
assert [_, _, %{"@language" => "pl"}] = modified["@context"]
|
||||
end
|
||||
end
|
||||
|
||||
describe "actor rewriting" do
|
||||
|
@ -621,5 +879,14 @@ defmodule Pleroma.Web.ActivityPub.TransmogrifierTest do
|
|||
processed = Transmogrifier.prepare_object(original)
|
||||
assert processed["formerRepresentations"] == original["formerRepresentations"]
|
||||
end
|
||||
|
||||
test "it uses contentMap to specify post language" do
|
||||
user = insert(:user)
|
||||
|
||||
{:ok, activity} = CommonAPI.post(user, %{status: "Cześć", language: "pl"})
|
||||
object = Transmogrifier.prepare_object(activity.object.data)
|
||||
|
||||
assert %{"contentMap" => %{"pl" => "Cześć"}} = object
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -173,16 +173,30 @@ defmodule Pleroma.Web.ActivityPub.UtilsTest do
|
|||
end
|
||||
end
|
||||
|
||||
test "make_json_ld_header/0" do
|
||||
assert Utils.make_json_ld_header() == %{
|
||||
"@context" => [
|
||||
"https://www.w3.org/ns/activitystreams",
|
||||
"http://localhost:4001/schemas/litepub-0.1.jsonld",
|
||||
%{
|
||||
"@language" => "und"
|
||||
}
|
||||
]
|
||||
}
|
||||
describe "make_json_ld_header/1" do
|
||||
test "makes jsonld header" do
|
||||
assert Utils.make_json_ld_header() == %{
|
||||
"@context" => [
|
||||
"https://www.w3.org/ns/activitystreams",
|
||||
"http://localhost:4001/schemas/litepub-0.1.jsonld",
|
||||
%{
|
||||
"@language" => "und"
|
||||
}
|
||||
]
|
||||
}
|
||||
end
|
||||
|
||||
test "includes language if specified" do
|
||||
assert Utils.make_json_ld_header(%{"language" => "pl"}) == %{
|
||||
"@context" => [
|
||||
"https://www.w3.org/ns/activitystreams",
|
||||
"http://localhost:4001/schemas/litepub-0.1.jsonld",
|
||||
%{
|
||||
"@language" => "pl"
|
||||
}
|
||||
]
|
||||
}
|
||||
end
|
||||
end
|
||||
|
||||
describe "get_existing_votes" do
|
||||
|
|
|
@ -0,0 +1,159 @@
|
|||
defmodule Pleroma.Web.MastodonAPI.TagControllerTest do
|
||||
use Pleroma.Web.ConnCase
|
||||
|
||||
import Pleroma.Factory
|
||||
import Tesla.Mock
|
||||
|
||||
alias Pleroma.User
|
||||
|
||||
setup do
|
||||
mock(fn env -> apply(HttpRequestMock, :request, [env]) end)
|
||||
:ok
|
||||
end
|
||||
|
||||
describe "GET /api/v1/tags/:id" do
|
||||
test "returns 200 with tag" do
|
||||
%{user: user, conn: conn} = oauth_access(["read"])
|
||||
|
||||
tag = insert(:hashtag, name: "jubjub")
|
||||
{:ok, _user} = User.follow_hashtag(user, tag)
|
||||
|
||||
response =
|
||||
conn
|
||||
|> get("/api/v1/tags/jubjub")
|
||||
|> json_response_and_validate_schema(200)
|
||||
|
||||
assert %{
|
||||
"name" => "jubjub",
|
||||
"url" => "http://localhost:4001/tags/jubjub",
|
||||
"history" => [],
|
||||
"following" => true
|
||||
} = response
|
||||
end
|
||||
|
||||
test "returns 404 with unknown tag" do
|
||||
%{conn: conn} = oauth_access(["read"])
|
||||
|
||||
conn
|
||||
|> get("/api/v1/tags/jubjub")
|
||||
|> json_response_and_validate_schema(404)
|
||||
end
|
||||
end
|
||||
|
||||
describe "POST /api/v1/tags/:id/follow" do
|
||||
test "should follow a hashtag" do
|
||||
%{user: user, conn: conn} = oauth_access(["write:follows"])
|
||||
hashtag = insert(:hashtag, name: "jubjub")
|
||||
|
||||
response =
|
||||
conn
|
||||
|> post("/api/v1/tags/jubjub/follow")
|
||||
|> json_response_and_validate_schema(200)
|
||||
|
||||
assert response["following"] == true
|
||||
user = User.get_cached_by_ap_id(user.ap_id)
|
||||
assert User.following_hashtag?(user, hashtag)
|
||||
end
|
||||
|
||||
test "should 404 if hashtag doesn't exist" do
|
||||
%{conn: conn} = oauth_access(["write:follows"])
|
||||
|
||||
response =
|
||||
conn
|
||||
|> post("/api/v1/tags/rubrub/follow")
|
||||
|> json_response_and_validate_schema(404)
|
||||
|
||||
assert response["error"] == "Hashtag not found"
|
||||
end
|
||||
end
|
||||
|
||||
describe "POST /api/v1/tags/:id/unfollow" do
|
||||
test "should unfollow a hashtag" do
|
||||
%{user: user, conn: conn} = oauth_access(["write:follows"])
|
||||
hashtag = insert(:hashtag, name: "jubjub")
|
||||
{:ok, user} = User.follow_hashtag(user, hashtag)
|
||||
|
||||
response =
|
||||
conn
|
||||
|> post("/api/v1/tags/jubjub/unfollow")
|
||||
|> json_response_and_validate_schema(200)
|
||||
|
||||
assert response["following"] == false
|
||||
user = User.get_cached_by_ap_id(user.ap_id)
|
||||
refute User.following_hashtag?(user, hashtag)
|
||||
end
|
||||
|
||||
test "should 404 if hashtag doesn't exist" do
|
||||
%{conn: conn} = oauth_access(["write:follows"])
|
||||
|
||||
response =
|
||||
conn
|
||||
|> post("/api/v1/tags/rubrub/unfollow")
|
||||
|> json_response_and_validate_schema(404)
|
||||
|
||||
assert response["error"] == "Hashtag not found"
|
||||
end
|
||||
end
|
||||
|
||||
describe "GET /api/v1/followed_tags" do
|
||||
test "should list followed tags" do
|
||||
%{user: user, conn: conn} = oauth_access(["read:follows"])
|
||||
|
||||
response =
|
||||
conn
|
||||
|> get("/api/v1/followed_tags")
|
||||
|> json_response_and_validate_schema(200)
|
||||
|
||||
assert Enum.empty?(response)
|
||||
|
||||
hashtag = insert(:hashtag, name: "jubjub")
|
||||
{:ok, _user} = User.follow_hashtag(user, hashtag)
|
||||
|
||||
response =
|
||||
conn
|
||||
|> get("/api/v1/followed_tags")
|
||||
|> json_response_and_validate_schema(200)
|
||||
|
||||
assert [%{"name" => "jubjub"}] = response
|
||||
end
|
||||
|
||||
test "should include a link header to paginate" do
|
||||
%{user: user, conn: conn} = oauth_access(["read:follows"])
|
||||
|
||||
for i <- 1..21 do
|
||||
hashtag = insert(:hashtag, name: "jubjub#{i}}")
|
||||
{:ok, _user} = User.follow_hashtag(user, hashtag)
|
||||
end

      response =
        conn
        |> get("/api/v1/followed_tags")

      json = json_response_and_validate_schema(response, 200)
      assert Enum.count(json) == 20
      assert [link_header] = get_resp_header(response, "link")
      assert link_header =~ "rel=\"next\""
      next_link = extract_next_link_header(link_header)

      response =
        conn
        |> get(next_link)
        |> json_response_and_validate_schema(200)

      assert Enum.count(response) == 1
    end

    test "should refuse access without read:follows scope" do
      %{conn: conn} = oauth_access(["write"])

      conn
      |> get("/api/v1/followed_tags")
      |> json_response_and_validate_schema(403)
    end
  end

  defp extract_next_link_header(header) do
    [_, next_link] = Regex.run(~r{<(?<next_link>.*)>; rel="next"}, header)
    next_link
  end
end

@@ -951,6 +951,26 @@ defmodule Pleroma.Web.MastodonAPI.StatusViewTest
    assert status.edited_at
  end

  test "it shows post language" do
    user = insert(:user)

    {:ok, post} = CommonAPI.post(user, %{status: "Szczęść Boże", language: "pl"})

    status = StatusView.render("show.json", activity: post)

    assert status.language == "pl"
  end

  test "doesn't show post language if it's 'und'" do
    user = insert(:user)

    {:ok, post} = CommonAPI.post(user, %{status: "sdifjogijodfg", language: "und"})

    status = StatusView.render("show.json", activity: post)

    assert status.language == nil
  end

  test "with a source object" do
    note =
      insert(:note,

@@ -248,8 +248,8 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyControllerTest

      response = get(conn, url)

      assert response.status == 302
      assert redirected_to(response) == media_proxy_url
      assert response.status == 301
      assert redirected_to(response, 301) == media_proxy_url
    end

    test "with `static` param and non-GIF image preview requested, " <>

@@ -290,8 +290,8 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyControllerTest

      response = get(conn, url)

      assert response.status == 302
      assert redirected_to(response) == media_proxy_url
      assert response.status == 301
      assert redirected_to(response, 301) == media_proxy_url
    end

    test "thumbnails PNG images into PNG", %{

@@ -356,5 +356,32 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyControllerTest
      assert response.status == 302
      assert redirected_to(response) == media_proxy_url
    end

    test "redirects to media proxy URI with 301 when image is too small for preview", %{
      conn: conn,
      url: url,
      media_proxy_url: media_proxy_url
    } do
      clear_config([:media_preview_proxy],
        enabled: true,
        min_content_length: 1000,
        image_quality: 85,
        thumbnail_max_width: 100,
        thumbnail_max_height: 100
      )

      Tesla.Mock.mock(fn
        %{method: :head, url: ^media_proxy_url} ->
          %Tesla.Env{
            status: 200,
            body: "",
            headers: [{"content-type", "image/png"}, {"content-length", "500"}]
          }
      end)

      response = get(conn, url)
      assert response.status == 301
      assert redirected_to(response, 301) == media_proxy_url
    end
  end
end

@@ -9,6 +9,7 @@ defmodule Pleroma.Web.Metadata.Providers.OpenGraphTest

  alias Pleroma.UnstubbedConfigMock, as: ConfigMock
  alias Pleroma.Web.Metadata.Providers.OpenGraph
  alias Pleroma.Web.Metadata.Utils

  setup do
    ConfigMock

@@ -197,4 +198,58 @@ defmodule Pleroma.Web.Metadata.Providers.OpenGraphTest
               "http://localhost:4001/proxy/preview/LzAnlke-l5oZbNzWsrHfprX1rGw/aHR0cHM6Ly9wbGVyb21hLmdvdi9hYm91dC9qdWNoZS53ZWJt/juche.webm"
             ], []} in result
  end

  test "meta tag ordering matches attachment order" do
    user = insert(:user, name: "Jimmy Hendriks", bio: "born 19 March 1994")

    note =
      insert(:note, %{
        data: %{
          "actor" => user.ap_id,
          "tag" => [],
          "id" => "https://pleroma.gov/objects/whatever",
          "summary" => "",
          "content" => "pleroma in a nutshell",
          "attachment" => [
            %{
              "url" => [
                %{
                  "mediaType" => "image/png",
                  "href" => "https://example.com/first.png",
                  "height" => 1024,
                  "width" => 1280
                }
              ]
            },
            %{
              "url" => [
                %{
                  "mediaType" => "image/png",
                  "href" => "https://example.com/second.png",
                  "height" => 1024,
                  "width" => 1280
                }
              ]
            }
          ]
        }
      })

    result = OpenGraph.build_tags(%{object: note, url: note.data["id"], user: user})

    assert [
             {:meta, [property: "og:title", content: Utils.user_name_string(user)], []},
             {:meta, [property: "og:url", content: "https://pleroma.gov/objects/whatever"], []},
             {:meta, [property: "og:description", content: "pleroma in a nutshell"], []},
             {:meta, [property: "og:type", content: "article"], []},
             {:meta, [property: "og:image", content: "https://example.com/first.png"], []},
             {:meta, [property: "og:image:alt", content: nil], []},
             {:meta, [property: "og:image:width", content: "1280"], []},
             {:meta, [property: "og:image:height", content: "1024"], []},
             {:meta, [property: "og:image", content: "https://example.com/second.png"], []},
             {:meta, [property: "og:image:alt", content: nil], []},
             {:meta, [property: "og:image:width", content: "1280"], []},
             {:meta, [property: "og:image:height", content: "1024"], []}
           ] == result
  end
end

@@ -202,4 +202,58 @@ defmodule Pleroma.Web.Metadata.Providers.TwitterCardTest
             {:meta, [name: "twitter:player:stream:content_type", content: "video/webm"], []}
           ] == result
  end

  test "meta tag ordering matches attachment order" do
    user = insert(:user, name: "Jimmy Hendriks", bio: "born 19 March 1994")

    note =
      insert(:note, %{
        data: %{
          "actor" => user.ap_id,
          "tag" => [],
          "id" => "https://pleroma.gov/objects/whatever",
          "summary" => "",
          "content" => "pleroma in a nutshell",
          "attachment" => [
            %{
              "url" => [
                %{
                  "mediaType" => "image/png",
                  "href" => "https://example.com/first.png",
                  "height" => 1024,
                  "width" => 1280
                }
              ]
            },
            %{
              "url" => [
                %{
                  "mediaType" => "image/png",
                  "href" => "https://example.com/second.png",
                  "height" => 1024,
                  "width" => 1280
                }
              ]
            }
          ]
        }
      })

    result = TwitterCard.build_tags(%{object: note, activity_id: note.data["id"], user: user})

    assert [
             {:meta, [name: "twitter:title", content: Utils.user_name_string(user)], []},
             {:meta, [name: "twitter:description", content: "pleroma in a nutshell"], []},
             {:meta, [name: "twitter:card", content: "summary_large_image"], []},
             {:meta, [name: "twitter:image", content: "https://example.com/first.png"], []},
             {:meta, [name: "twitter:image:alt", content: ""], []},
             {:meta, [name: "twitter:player:width", content: "1280"], []},
             {:meta, [name: "twitter:player:height", content: "1024"], []},
             {:meta, [name: "twitter:card", content: "summary_large_image"], []},
             {:meta, [name: "twitter:image", content: "https://example.com/second.png"], []},
             {:meta, [name: "twitter:image:alt", content: ""], []},
             {:meta, [name: "twitter:player:width", content: "1280"], []},
             {:meta, [name: "twitter:player:height", content: "1024"], []}
           ] == result
  end
end

@@ -83,4 +83,23 @@ defmodule Pleroma.Web.RichMedia.CardTest
             Card.get_by_activity(activity)
           )
  end

  test "refuses to crawl URL in activity from ignored host/domain" do
    clear_config([:rich_media, :ignore_hosts], ["example.com"])

    user = insert(:user)

    url = "https://example.com/ogp"

    {:ok, activity} =
      CommonAPI.post(user, %{
        status: "[test](#{url})",
        content_type: "text/markdown"
      })

    refute_enqueued(
      worker: RichMediaWorker,
      args: %{"url" => url, "activity_id" => activity.id}
    )
  end
end

@@ -54,7 +54,6 @@ defmodule Pleroma.Web.RichMedia.ParserTest
             {:ok,
              %{
                "card" => "summary",
                "site" => "@flickr",
                "image" => "https://farm6.staticflickr.com/5510/14338202952_93595258ff_z.jpg",
                "title" => "Small Island Developing States Photo Submission",
                "description" => "View the album on Flickr.",

@@ -17,10 +17,6 @@ defmodule Pleroma.Web.RichMedia.Parsers.TwitterCardTest

    assert TwitterCard.parse(html, %{}) ==
             %{
               "app:id:googleplay" => "com.nytimes.android",
               "app:name:googleplay" => "NYTimes",
               "app:url:googleplay" => "nytimes://reader/id/100000006583622",
               "site" => nil,
               "description" =>
                 "With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.",
               "image" =>

@@ -44,7 +40,7 @@ defmodule Pleroma.Web.RichMedia.Parsers.TwitterCardTest
               "description" =>
                 "With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.",
               "image" =>
                 "https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-videoSixteenByNineJumbo1600.jpg",
                 "https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-facebookJumbo.jpg",
               "image:alt" => "",
               "title" =>
                 "She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database.",

@@ -61,16 +57,12 @@ defmodule Pleroma.Web.RichMedia.Parsers.TwitterCardTest

    assert TwitterCard.parse(html, %{}) ==
             %{
               "app:id:googleplay" => "com.nytimes.android",
               "app:name:googleplay" => "NYTimes",
               "app:url:googleplay" => "nytimes://reader/id/100000006583622",
               "card" => "summary_large_image",
               "description" =>
                 "With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.",
               "image" =>
                 "https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-videoSixteenByNineJumbo1600.jpg",
                 "https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-facebookJumbo.jpg",
               "image:alt" => "",
               "site" => nil,
               "title" =>
                 "She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database.",
               "url" =>

@@ -90,13 +82,11 @@ defmodule Pleroma.Web.RichMedia.Parsers.TwitterCardTest

    assert TwitterCard.parse(html, %{}) ==
             %{
               "site" => "@atlasobscura",
               "title" => "The Missing Grave of Margaret Corbin, Revolutionary War Veteran",
               "card" => "summary_large_image",
               "image" => image_path,
               "description" =>
                 "She's the only woman veteran honored with a monument at West Point. But where was she buried?",
               "site_name" => "Atlas Obscura",
               "type" => "article",
               "url" => "http://www.atlasobscura.com/articles/margaret-corbin-grave-west-point"
             }

@@ -109,12 +99,8 @@ defmodule Pleroma.Web.RichMedia.Parsers.TwitterCardTest

    assert TwitterCard.parse(html, %{}) ==
             %{
               "site" => nil,
               "title" =>
                 "She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database.",
               "app:id:googleplay" => "com.nytimes.android",
               "app:name:googleplay" => "NYTimes",
               "app:url:googleplay" => "nytimes://reader/id/100000006583622",
               "description" =>
                 "With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.",
               "image" =>

@@ -124,4 +110,23 @@ defmodule Pleroma.Web.RichMedia.Parsers.TwitterCardTest
                 "https://www.nytimes.com/2019/08/01/nyregion/nypd-facial-recognition-children-teenagers.html"
             }
  end

  test "takes first image if multiple are specified" do
    html =
      File.read!("test/fixtures/fulmo.html")
      |> Floki.parse_document!()

    assert TwitterCard.parse(html, %{}) ==
             %{
               "description" => "Pri feoj, kiuj devis ordigi falintan arbon.",
               "image" => "https://tirifto.xwx.moe/r/ilustrajhoj/pinglordigado.png",
               "title" => "Fulmo",
               "type" => "website",
               "url" => "https://tirifto.xwx.moe/eo/rakontoj/fulmo.html",
               "image:alt" =>
                 "Meze de arbaro kuŝas falinta trunko, sen pingloj kaj kun branĉoj derompitaj. Post ĝi videblas du feoj: florofeo maldekstre kaj nubofeo dekstre. La florofeo iom kaŝas sin post la trunko. La nubofeo staras kaj tenas amason da pigloj. Ili iom rigardas al si.",
               "image:height" => "630",
               "image:width" => "1200"
             }
  end
end

@@ -558,6 +558,36 @@ defmodule Pleroma.Web.StreamerTest
      assert_receive {:render_with_user, _, "status_update.json", ^create, _}
      refute Streamer.filtered_by_user?(user, edited)
    end

    test "it streams posts containing followed hashtags on the 'user' stream", %{
      user: user,
      token: oauth_token
    } do
      hashtag = insert(:hashtag, %{name: "tenshi"})
      other_user = insert(:user)
      {:ok, user} = User.follow_hashtag(user, hashtag)

      Streamer.get_topic_and_add_socket("user", user, oauth_token)
      {:ok, activity} = CommonAPI.post(other_user, %{status: "hey #tenshi"})

      assert_receive {:render_with_user, _, "update.json", ^activity, _}
    end

    test "should not stream private posts containing followed hashtags on the 'user' stream", %{
      user: user,
      token: oauth_token
    } do
      hashtag = insert(:hashtag, %{name: "tenshi"})
      other_user = insert(:user)
      {:ok, user} = User.follow_hashtag(user, hashtag)

      Streamer.get_topic_and_add_socket("user", user, oauth_token)

      {:ok, activity} =
        CommonAPI.post(other_user, %{status: "hey #tenshi", visibility: "private"})

      refute_receive {:render_with_user, _, "update.json", ^activity, _}
    end
  end

  describe "public streams" do

@@ -668,4 +668,11 @@ defmodule Pleroma.Factory
    |> Map.merge(params)
    |> Pleroma.Announcement.add_rendered_properties()
  end

  def hashtag_factory(params \\ %{}) do
    %Pleroma.Hashtag{
      name: "test #{sequence(:hashtag_name, & &1)}"
    }
    |> Map.merge(params)
  end
end