Merge branch 'stable' into 'develop'

2.9.0 mergeback

See merge request pleroma/pleroma!4332

Commit 16944eb9da: 23 changed files with 1394 additions and 162 deletions
CHANGELOG.md (27 additions)

@@ -4,6 +4,33 @@ All notable changes to this project will be documented in this file.

 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

+## 2.9.0
+
+### Security
+
+- Require HTTP signatures (if enabled) for routes used by both C2S and S2S AP API
+- Fix several spoofing vectors
+
+### Changed
+
+- Performance: Use a 301 (permanent) redirect instead of a 302 (temporary) one when redirecting small images in the media proxy. This allows browsers to cache the redirect response.
+
+### Added
+
+- Include "published" in actor view
+- Link to exported outbox/followers/following collections in backup actor.json
+- Hashtag following
+- Allow specifying post language
+
+### Fixed
+
+- Verify a local Update sent through AP C2S so users can only update their own objects
+- Fix Mastodon incoming edits with inlined "likes"
+- Allow incoming "Listen" activities
+- Fix missing check for domain presence in the rich media ignore_host configuration
+- Fix Rich Media parsing of TwitterCards/OpenGraph to adhere to the spec and always choose the first image if multiple are provided
+- Fix OpenGraph/TwitterCard meta tag ordering for posts with multiple attachments
+- Fix blurhash generation crashes
+
+### Removed
+
+- Retire the MRFs DNSRBL, FODirectReply, and QuietReply
+
 ## 2.8.0

 ### Changed
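For context on the media proxy entry above: Phoenix's redirect/2 sends 302 unless a status was already set on the conn, so a permanent redirect is a one-line put_status ahead of the redirect. A minimal sketch, not the actual Pleroma controller code; media_url is a hypothetical variable:

    # Sketch only. Phoenix.Controller.redirect/2 uses the status already set
    # on the conn (defaulting to 302), so 301 just needs put_status/2 first.
    conn
    |> put_status(:moved_permanently)
    |> redirect(external: media_url)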
changelog.d/c2s-update-verify.fix (new file, 1 line)

+Verify a local Update sent through AP C2S so users can only update their own objects

changelog.d/ensure-authorized-fetch.security (new file, 1 line)

+Require HTTP signatures (if enabled) for routes used by both C2S and S2S AP API
@@ -150,7 +150,10 @@ config :mime, :types, %{
   "application/xrd+xml" => ["xrd+xml"],
   "application/jrd+json" => ["jrd+json"],
   "application/activity+json" => ["activity+json"],
-  "application/ld+json" => ["activity+json"]
+  "application/ld+json" => ["activity+json"],
+  # Can be removed when bumping MIME past 2.0.5
+  # see https://akkoma.dev/AkkomaGang/akkoma/issues/657
+  "image/apng" => ["apng"]
 }

 config :tesla, adapter: Tesla.Adapter.Hackney

@@ -359,7 +362,8 @@ config :pleroma, :activitypub,
   follow_handshake_timeout: 500,
   note_replies_output_limit: 5,
   sign_object_fetches: true,
-  authorized_fetch_mode: false
+  authorized_fetch_mode: false,
+  client_api_enabled: false

 config :pleroma, :streamer,
   workers: 3,
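C2S stays off by default; an admin who wants the AP client API has to flip the new flag. A minimal sketch of an instance override, assuming the usual prod.secret.exs-style config file:

    # Sketch: enable the ActivityPub C2S API on an instance.
    # While client_api_enabled is false, APClientApiEnabledPlug (added in this
    # commit) responds 403 "C2S not enabled" on C2S-only routes.
    import Config

    config :pleroma, :activitypub,
      client_api_enabled: true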
@@ -1772,6 +1772,11 @@ config :pleroma, :config_description, [
         type: :integer,
         description: "Following handshake timeout",
         suggestions: [500]
+      },
+      %{
+        key: :client_api_enabled,
+        type: :boolean,
+        description: "Allow client to server ActivityPub interactions"
       }
     ]
   },
@@ -38,7 +38,10 @@ config :pleroma, :instance,
   external_user_synchronization: false,
   static_dir: "test/instance_static/"

-config :pleroma, :activitypub, sign_object_fetches: false, follow_handshake_timeout: 0
+config :pleroma, :activitypub,
+  sign_object_fetches: false,
+  follow_handshake_timeout: 0,
+  client_api_enabled: true

 # Configure your database
 config :pleroma, Pleroma.Repo,
@@ -93,6 +93,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do
       )

       files = fetch_and_decode!(files_loc)
+      files_to_unzip = for({_, f} <- files, do: f)

       IO.puts(IO.ANSI.format(["Unpacking ", :bright, pack_name]))

@@ -103,17 +104,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do
         pack_name
       ])

-      files_to_unzip =
-        Enum.map(
-          files,
-          fn {_, f} -> to_charlist(f) end
-        )
-
-      {:ok, _} =
-        :zip.unzip(binary_archive,
-          cwd: String.to_charlist(pack_path),
-          file_list: files_to_unzip
-        )
+      {:ok, _} = Pleroma.SafeZip.unzip_data(binary_archive, pack_path, files_to_unzip)

       IO.puts(IO.ANSI.format(["Writing pack.json for ", :bright, pack_name]))

@@ -201,7 +192,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do

     tmp_pack_dir = Path.join(System.tmp_dir!(), "emoji-pack-#{name}")

-    {:ok, _} = :zip.unzip(binary_archive, cwd: String.to_charlist(tmp_pack_dir))
+    {:ok, _} = Pleroma.SafeZip.unzip_data(binary_archive, tmp_pack_dir)

     emoji_map = Pleroma.Emoji.Loader.make_shortcode_to_file_map(tmp_pack_dir, exts)
@@ -24,12 +24,13 @@ defmodule Pleroma.Emoji.Pack do

   alias Pleroma.Emoji
   alias Pleroma.Emoji.Pack
+  alias Pleroma.SafeZip
   alias Pleroma.Utils

   @spec create(String.t()) :: {:ok, t()} | {:error, File.posix()} | {:error, :empty_values}
   def create(name) do
     with :ok <- validate_not_empty([name]),
-         dir <- Path.join(emoji_path(), name),
+         dir <- path_join_name_safe(emoji_path(), name),
          :ok <- File.mkdir(dir) do
       save_pack(%__MODULE__{pack_file: Path.join(dir, "pack.json")})
     end

@@ -65,43 +66,21 @@ defmodule Pleroma.Emoji.Pack do
           {:ok, [binary()]} | {:error, File.posix(), binary()} | {:error, :empty_values}
   def delete(name) do
     with :ok <- validate_not_empty([name]),
-         pack_path <- Path.join(emoji_path(), name) do
+         pack_path <- path_join_name_safe(emoji_path(), name) do
       File.rm_rf(pack_path)
     end
   end

-  @spec unpack_zip_emojies(list(tuple())) :: list(map())
-  defp unpack_zip_emojies(zip_files) do
-    Enum.reduce(zip_files, [], fn
-      {_, path, s, _, _, _}, acc when elem(s, 2) == :regular ->
-        with(
-          filename <- Path.basename(path),
-          shortcode <- Path.basename(filename, Path.extname(filename)),
-          false <- Emoji.exist?(shortcode)
-        ) do
-          [%{path: path, filename: path, shortcode: shortcode} | acc]
-        else
-          _ -> acc
-        end
-
-      _, acc ->
-        acc
-    end)
-  end
-
   @spec add_file(t(), String.t(), Path.t(), Plug.Upload.t()) ::
           {:ok, t()}
           | {:error, File.posix() | atom()}
   def add_file(%Pack{} = pack, _, _, %Plug.Upload{content_type: "application/zip"} = file) do
-    with {:ok, zip_files} <- :zip.table(to_charlist(file.path)),
-         [_ | _] = emojies <- unpack_zip_emojies(zip_files),
+    with {:ok, zip_files} <- SafeZip.list_dir_file(file.path),
+         [_ | _] = emojies <- map_zip_emojies(zip_files),
          {:ok, tmp_dir} <- Utils.tmp_dir("emoji") do
       try do
         {:ok, _emoji_files} =
-          :zip.unzip(
-            to_charlist(file.path),
-            [{:file_list, Enum.map(emojies, & &1[:path])}, {:cwd, String.to_charlist(tmp_dir)}]
-          )
+          SafeZip.unzip_file(file.path, tmp_dir, Enum.map(emojies, & &1[:path]))

         {_, updated_pack} =
           Enum.map_reduce(emojies, pack, fn item, emoji_pack ->

@@ -292,7 +271,7 @@ defmodule Pleroma.Emoji.Pack do
   @spec load_pack(String.t()) :: {:ok, t()} | {:error, :file.posix()}
   def load_pack(name) do
     name = Path.basename(name)
-    pack_file = Path.join([emoji_path(), name, "pack.json"])
+    pack_file = path_join_name_safe(emoji_path(), name) |> Path.join("pack.json")

     with {:ok, _} <- File.stat(pack_file),
          {:ok, pack_data} <- File.read(pack_file) do

@@ -416,10 +395,9 @@ defmodule Pleroma.Emoji.Pack do
   end

   defp create_archive_and_cache(pack, hash) do
-    files = [~c"pack.json" | Enum.map(pack.files, fn {_, file} -> to_charlist(file) end)]
-
-    {:ok, {_, result}} =
-      :zip.zip(~c"#{pack.name}.zip", files, [:memory, cwd: to_charlist(pack.path)])
+    pack_file_list = Enum.into(pack.files, [], fn {_, f} -> f end)
+    files = ["pack.json" | pack_file_list]
+    {:ok, {_, result}} = SafeZip.zip("#{pack.name}.zip", files, pack.path, true)

     ttl_per_file = Pleroma.Config.get!([:emoji, :shared_pack_cache_seconds_per_file])
     overall_ttl = :timer.seconds(ttl_per_file * Enum.count(files))

@@ -478,7 +456,7 @@ defmodule Pleroma.Emoji.Pack do
   end

   defp save_file(%Plug.Upload{path: upload_path}, pack, filename) do
-    file_path = Path.join(pack.path, filename)
+    file_path = path_join_safe(pack.path, filename)
     create_subdirs(file_path)

     with {:ok, _} <- File.copy(upload_path, file_path) do

@@ -497,8 +475,8 @@ defmodule Pleroma.Emoji.Pack do
   end

   defp rename_file(pack, filename, new_filename) do
-    old_path = Path.join(pack.path, filename)
-    new_path = Path.join(pack.path, new_filename)
+    old_path = path_join_safe(pack.path, filename)
+    new_path = path_join_safe(pack.path, new_filename)
     create_subdirs(new_path)

     with :ok <- File.rename(old_path, new_path) do

@@ -516,7 +494,7 @@ defmodule Pleroma.Emoji.Pack do

   defp remove_file(pack, shortcode) do
     with {:ok, filename} <- get_filename(pack, shortcode),
-         emoji <- Path.join(pack.path, filename),
+         emoji <- path_join_safe(pack.path, filename),
          :ok <- File.rm(emoji) do
       remove_dir_if_empty(emoji, filename)
     end

@@ -534,7 +512,7 @@ defmodule Pleroma.Emoji.Pack do

   defp get_filename(pack, shortcode) do
     with %{^shortcode => filename} when is_binary(filename) <- pack.files,
-         file_path <- Path.join(pack.path, filename),
+         file_path <- path_join_safe(pack.path, filename),
          {:ok, _} <- File.stat(file_path) do
       {:ok, filename}
     else

@@ -584,11 +562,10 @@ defmodule Pleroma.Emoji.Pack do

   defp unzip(archive, pack_info, remote_pack, local_pack) do
     with :ok <- File.mkdir_p!(local_pack.path) do
-      files = Enum.map(remote_pack["files"], fn {_, path} -> to_charlist(path) end)
+      files = Enum.map(remote_pack["files"], fn {_, path} -> path end)
       # Fallback cannot contain a pack.json file
-      files = if pack_info[:fallback], do: files, else: [~c"pack.json" | files]
+      files = if pack_info[:fallback], do: files, else: ["pack.json" | files]

-      :zip.unzip(archive, cwd: to_charlist(local_pack.path), file_list: files)
+      SafeZip.unzip_data(archive, local_pack.path, files)
     end
   end

@@ -649,13 +626,43 @@ defmodule Pleroma.Emoji.Pack do
   end

   defp validate_has_all_files(pack, zip) do
-    with {:ok, f_list} <- :zip.unzip(zip, [:memory]) do
-      # Check if all files from the pack.json are in the archive
-      pack.files
-      |> Enum.all?(fn {_, from_manifest} ->
-        List.keyfind(f_list, to_charlist(from_manifest), 0)
-      end)
-      |> if(do: :ok, else: {:error, :incomplete})
-    end
+    # Check if all files from the pack.json are in the archive
+    eset =
+      Enum.reduce(pack.files, MapSet.new(), fn
+        {_, file}, s -> MapSet.put(s, to_charlist(file))
+      end)
+
+    if SafeZip.contains_all_data?(zip, eset),
+      do: :ok,
+      else: {:error, :incomplete}
+  end
+
+  defp path_join_name_safe(dir, name) do
+    if to_string(name) != Path.basename(name) or name in ["..", ".", ""] do
+      raise "Invalid or malicious pack name: #{name}"
+    else
+      Path.join(dir, name)
+    end
+  end
+
+  defp path_join_safe(dir, path) do
+    {:ok, safe_path} = Path.safe_relative(path)
+    Path.join(dir, safe_path)
+  end
+
+  defp map_zip_emojies(zip_files) do
+    Enum.reduce(zip_files, [], fn path, acc ->
+      with(
+        filename <- Path.basename(path),
+        shortcode <- Path.basename(filename, Path.extname(filename)),
+        # note: this only checks the shortcode; if an emoji already exists on the same path, but
+        # with a different shortcode, the existing one will be degraded to an alias of the new
+        false <- Emoji.exist?(shortcode)
+      ) do
+        [%{path: path, filename: path, shortcode: shortcode} | acc]
+      else
+        _ -> acc
+      end
+    end)
   end
 end
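The two new helpers split responsibilities: path_join_name_safe/2 rejects anything that is not a bare directory name, while path_join_safe/2 leans on Path.safe_relative/1 (Elixir 1.14+) to refuse traversal. A quick iex sketch of the latter's behavior:

    iex> Path.safe_relative("blobcat/blobcat.png")
    {:ok, "blobcat/blobcat.png"}
    iex> Path.safe_relative("../../etc/passwd")
    :error
    iex> Path.safe_relative("/etc/passwd")
    :error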
@@ -65,24 +65,12 @@ defmodule Pleroma.Frontend do
   end

   def unzip(zip, dest) do
-    with {:ok, unzipped} <- :zip.unzip(zip, [:memory]) do
-      File.rm_rf!(dest)
-      File.mkdir_p!(dest)
+    File.rm_rf!(dest)
+    File.mkdir_p!(dest)

-      Enum.each(unzipped, fn {filename, data} ->
-        path = filename
-
-        new_file_path = Path.join(dest, path)
-
-        path
-        |> Path.dirname()
-        |> then(&Path.join(dest, &1))
-        |> File.mkdir_p!()
-
-        if not File.dir?(new_file_path) do
-          File.write!(new_file_path, data)
-        end
-      end)
+    case Pleroma.SafeZip.unzip_data(zip, dest) do
+      {:ok, _} -> :ok
+      error -> error
     end
   end
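Frontend.unzip/2 keeps its old contract (:ok or an error tuple) but now delegates extraction, so a frontend bundle containing traversal entries is rejected before anything touches the static dir. Usage sketch with hypothetical paths:

    # Hypothetical paths; the zip argument is the raw archive binary, as before.
    zip_data = File.read!("/tmp/pleroma-fe.zip")
    :ok = Pleroma.Frontend.unzip(zip_data, "instance/static/frontends/pleroma-fe/stable")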
lib/pleroma/safe_zip.ex (new file, 216 lines)

# Akkoma: Magically expressive social media
# Copyright © 2024 Akkoma Authors <https://akkoma.dev/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.SafeZip do
  @moduledoc """
  Wraps the subset of Erlang's zip module we'd like to use,
  but enforces path-traversal safety everywhere and other checks.

  For convenience almost all functions accept both elixir strings and charlists,
  but output elixir strings themselves. However, this means the input parameter type
  can no longer be used to distinguish archive file paths from archive binary data in memory,
  thus where needed both a _data and a _file variant are provided.
  """

  @type text() :: String.t() | [char()]

  defp safe_path?(path) do
    # Path accepts elixir's chardata()
    case Path.safe_relative(path) do
      {:ok, _} -> true
      _ -> false
    end
  end

  defp safe_type?(file_type) do
    file_type in [:regular, :directory]
  end

  defp maybe_add_file(_type, _path_charlist, nil), do: nil

  defp maybe_add_file(:regular, path_charlist, file_list),
    do: [to_string(path_charlist) | file_list]

  defp maybe_add_file(_type, _path_charlist, file_list), do: file_list

  @spec check_safe_archive_and_maybe_list_files(binary() | [char()], [term()], boolean()) ::
          {:ok, [String.t()]} | {:error, reason :: term()}
  defp check_safe_archive_and_maybe_list_files(archive, opts, list) do
    acc = if list, do: [], else: nil

    with {:ok, table} <- :zip.table(archive, opts) do
      Enum.reduce_while(table, {:ok, acc}, fn
        # ZIP comment
        {:zip_comment, _}, acc ->
          {:cont, acc}

        # File entry
        {:zip_file, path, info, _comment, _offset, _comp_size}, {:ok, fl} ->
          with {_, type} <- {:get_type, elem(info, 2)},
               {_, true} <- {:type, safe_type?(type)},
               {_, true} <- {:safe_path, safe_path?(path)} do
            {:cont, {:ok, maybe_add_file(type, path, fl)}}
          else
            {:get_type, e} ->
              {:halt,
               {:error, "Couldn't determine file type of ZIP entry at #{path} (#{inspect(e)})"}}

            {:type, _} ->
              {:halt, {:error, "Potentially unsafe file type in ZIP at: #{path}"}}

            {:safe_path, _} ->
              {:halt, {:error, "Unsafe path in ZIP: #{path}"}}
          end

        # new OTP version?
        _, _acc ->
          {:halt, {:error, "Unknown ZIP record type"}}
      end)
    end
  end

  @spec check_safe_archive_and_list_files(binary() | [char()], [term()]) ::
          {:ok, [String.t()]} | {:error, reason :: term()}
  defp check_safe_archive_and_list_files(archive, opts \\ []) do
    check_safe_archive_and_maybe_list_files(archive, opts, true)
  end

  @spec check_safe_archive(binary() | [char()], [term()]) :: :ok | {:error, reason :: term()}
  defp check_safe_archive(archive, opts \\ []) do
    case check_safe_archive_and_maybe_list_files(archive, opts, false) do
      {:ok, _} -> :ok
      error -> error
    end
  end

  @spec check_safe_file_list([text()], text()) :: :ok | {:error, term()}
  defp check_safe_file_list([], _), do: :ok

  defp check_safe_file_list([path | tail], cwd) do
    with {_, true} <- {:path, safe_path?(path)},
         {_, {:ok, fstat}} <- {:stat, File.stat(Path.expand(path, cwd))},
         {_, true} <- {:type, safe_type?(fstat.type)} do
      check_safe_file_list(tail, cwd)
    else
      {:path, _} ->
        {:error, "Unsafe path escaping cwd: #{path}"}

      {:stat, e} ->
        {:error, "Unable to check file type of #{path}: #{inspect(e)}"}

      {:type, _} ->
        {:error, "Unsafe type at #{path}"}
    end
  end

  defp check_safe_file_list(_, _), do: {:error, "Malformed file_list"}

  @doc """
  Checks whether the archive data contains file entries for all paths from fset.

  Note this really only accepts entries corresponding to regular _files_;
  if a path is contained as, for example, a directory, this does not count as a match.
  """
  @spec contains_all_data?(binary(), MapSet.t()) :: true | false
  def contains_all_data?(archive_data, fset) do
    with {:ok, table} <- :zip.table(archive_data) do
      remaining =
        Enum.reduce(table, fset, fn
          {:zip_file, path, info, _comment, _offset, _comp_size}, fset ->
            if elem(info, 2) == :regular do
              MapSet.delete(fset, path)
            else
              fset
            end

          _, fset ->
            fset
        end)
        |> MapSet.size()

      remaining == 0
    else
      _ -> false
    end
  end

  @doc """
  List all file entries in ZIP, or error if invalid or unsafe.

  Note this really only lists regular files; no directories, ZIP comments or other types!
  """
  @spec list_dir_file(text()) :: {:ok, [String.t()]} | {:error, reason :: term()}
  def list_dir_file(archive) do
    path = to_charlist(archive)
    check_safe_archive_and_list_files(path)
  end

  defp stringify_zip({:ok, {fname, data}}), do: {:ok, {to_string(fname), data}}
  defp stringify_zip({:ok, fname}), do: {:ok, to_string(fname)}
  defp stringify_zip(ret), do: ret

  @spec zip(text(), [text()], text(), boolean()) ::
          {:ok, file_name :: String.t()}
          | {:ok, {file_name :: String.t(), file_data :: binary()}}
          | {:error, reason :: term()}
  def zip(name, file_list, cwd, memory \\ false) do
    opts = [{:cwd, to_charlist(cwd)}]
    opts = if memory, do: [:memory | opts], else: opts

    with :ok <- check_safe_file_list(file_list, cwd) do
      file_list = for f <- file_list, do: to_charlist(f)
      name = to_charlist(name)
      stringify_zip(:zip.zip(name, file_list, opts))
    end
  end

  @spec unzip_file(text(), text(), [text()] | nil) ::
          {:ok, [String.t()]}
          | {:error, reason :: term()}
          | {:error, {name :: text(), reason :: term()}}
  def unzip_file(archive, target_dir, file_list \\ nil) do
    do_unzip(to_charlist(archive), to_charlist(target_dir), file_list)
  end

  @spec unzip_data(binary(), text(), [text()] | nil) ::
          {:ok, [String.t()]}
          | {:error, reason :: term()}
          | {:error, {name :: text(), reason :: term()}}
  def unzip_data(archive, target_dir, file_list \\ nil) do
    do_unzip(archive, to_charlist(target_dir), file_list)
  end

  defp stringify_unzip({:ok, [{_fname, _data} | _] = filebinlist}),
    do: {:ok, Enum.map(filebinlist, fn {fname, data} -> {to_string(fname), data} end)}

  defp stringify_unzip({:ok, [_fname | _] = filelist}),
    do: {:ok, Enum.map(filelist, fn fname -> to_string(fname) end)}

  defp stringify_unzip({:error, {fname, term}}), do: {:error, {to_string(fname), term}}
  defp stringify_unzip(ret), do: ret

  @spec do_unzip(binary() | [char()], text(), [text()] | nil) ::
          {:ok, [String.t()]}
          | {:error, reason :: term()}
          | {:error, {name :: text(), reason :: term()}}
  defp do_unzip(archive, target_dir, file_list) do
    opts =
      if file_list != nil do
        [
          file_list: for(f <- file_list, do: to_charlist(f)),
          cwd: target_dir
        ]
      else
        [cwd: target_dir]
      end

    with :ok <- check_safe_archive(archive) do
      stringify_unzip(:zip.unzip(archive, opts))
    end
  end
end
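A short usage sketch of the module's surface (file names hypothetical). The _file/_data split exists because both variants normalize inputs to the same types, so the argument type alone can no longer signal "path on disk" versus "archive bytes in memory":

    alias Pleroma.SafeZip

    # Listing fails on archives containing traversal entries or unsafe file types.
    {:ok, entries} = SafeZip.list_dir_file("uploads/pack.zip")

    # Extract from a path on disk...
    {:ok, _files} = SafeZip.unzip_file("uploads/pack.zip", "/tmp/pack")

    # ...or from bytes already in memory, optionally limited to some entries.
    archive = File.read!("uploads/pack.zip")
    {:ok, _files} = SafeZip.unzip_data(archive, "/tmp/pack", Enum.take(entries, 1))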
@@ -16,6 +16,7 @@ defmodule Pleroma.User.Backup do
   alias Pleroma.Bookmark
   alias Pleroma.Config
   alias Pleroma.Repo
+  alias Pleroma.SafeZip
   alias Pleroma.Uploaders.Uploader
   alias Pleroma.User
   alias Pleroma.Web.ActivityPub.ActivityPub

@@ -179,12 +180,12 @@ defmodule Pleroma.User.Backup do
   end

   @files [
-    ~c"actor.json",
-    ~c"outbox.json",
-    ~c"likes.json",
-    ~c"bookmarks.json",
-    ~c"followers.json",
-    ~c"following.json"
+    "actor.json",
+    "outbox.json",
+    "likes.json",
+    "bookmarks.json",
+    "followers.json",
+    "following.json"
   ]

   @spec run(t()) :: {:ok, t()} | {:error, :failed}

@@ -200,7 +201,7 @@ defmodule Pleroma.User.Backup do
         {_, :ok} <- {:followers, followers(backup.tempdir, backup.user)},
         {_, :ok} <- {:following, following(backup.tempdir, backup.user)},
         {_, {:ok, _zip_path}} <-
-          {:zip, :zip.create(to_charlist(tempfile), @files, cwd: to_charlist(backup.tempdir))},
+          {:zip, SafeZip.zip(tempfile, @files, backup.tempdir)},
         {_, {:ok, %File.Stat{size: zip_size}}} <- {:filestat, File.stat(tempfile)},
         {:ok, updated_backup} <- update_record(backup, %{file_size: zip_size}) do
       {:ok, updated_backup}
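With memory left at its default of false, SafeZip.zip/4 writes the archive and returns {:ok, file_name}, which is the shape the {:zip, {:ok, _zip_path}} clause above matches on. Sketch with hypothetical paths:

    # Hypothetical paths; every listed file must exist under the cwd, since
    # SafeZip.zip/4 stats each entry before archiving.
    {:ok, _zip_path} =
      Pleroma.SafeZip.zip("backup.zip", ["actor.json"], "/tmp/backup-123")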
@ -43,6 +43,38 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
||||||
|> fix_content_map()
|
|> fix_content_map()
|
||||||
|> fix_addressing()
|
|> fix_addressing()
|
||||||
|> fix_summary()
|
|> fix_summary()
|
||||||
|
|> fix_history(&fix_object/1)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp maybe_fix_object(%{"attributedTo" => _} = object), do: fix_object(object)
|
||||||
|
defp maybe_fix_object(object), do: object
|
||||||
|
|
||||||
|
defp fix_history(%{"formerRepresentations" => %{"orderedItems" => list}} = obj, fix_fun)
|
||||||
|
when is_list(list) do
|
||||||
|
update_in(obj["formerRepresentations"]["orderedItems"], fn h -> Enum.map(h, fix_fun) end)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp fix_history(obj, _), do: obj
|
||||||
|
|
||||||
|
defp fix_recursive(obj, fun) do
|
||||||
|
# unlike Erlang, Elixir does not support recursive inline functions
|
||||||
|
# which would allow us to avoid reconstructing this on every recursion
|
||||||
|
rec_fun = fn
|
||||||
|
obj when is_map(obj) -> fix_recursive(obj, fun)
|
||||||
|
# there may be simple AP IDs in history (or object field)
|
||||||
|
obj -> obj
|
||||||
|
end
|
||||||
|
|
||||||
|
obj
|
||||||
|
|> fun.()
|
||||||
|
|> fix_history(rec_fun)
|
||||||
|
|> then(fn
|
||||||
|
%{"object" => object} = doc when is_map(object) ->
|
||||||
|
update_in(doc["object"], rec_fun)
|
||||||
|
|
||||||
|
apdoc ->
|
||||||
|
apdoc
|
||||||
|
end)
|
||||||
end
|
end
|
||||||
|
|
||||||
def fix_summary(%{"summary" => nil} = object) do
|
def fix_summary(%{"summary" => nil} = object) do
|
||||||
|
@ -375,11 +407,18 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
||||||
end)
|
end)
|
||||||
end
|
end
|
||||||
|
|
||||||
def handle_incoming(data, options \\ [])
|
def handle_incoming(data, options \\ []) do
|
||||||
|
data
|
||||||
|
|> fix_recursive(&strip_internal_fields/1)
|
||||||
|
|> handle_incoming_normalized(options)
|
||||||
|
end
|
||||||
|
|
||||||
# Flag objects are placed ahead of the ID check because Mastodon 2.8 and earlier send them
|
# Flag objects are placed ahead of the ID check because Mastodon 2.8 and earlier send them
|
||||||
# with nil ID.
|
# with nil ID.
|
||||||
def handle_incoming(%{"type" => "Flag", "object" => objects, "actor" => actor} = data, _options) do
|
defp handle_incoming_normalized(
|
||||||
|
%{"type" => "Flag", "object" => objects, "actor" => actor} = data,
|
||||||
|
_options
|
||||||
|
) do
|
||||||
with context <- data["context"] || Utils.generate_context_id(),
|
with context <- data["context"] || Utils.generate_context_id(),
|
||||||
content <- data["content"] || "",
|
content <- data["content"] || "",
|
||||||
%User{} = actor <- User.get_cached_by_ap_id(actor),
|
%User{} = actor <- User.get_cached_by_ap_id(actor),
|
||||||
|
@ -400,16 +439,17 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
||||||
end
|
end
|
||||||
|
|
||||||
# disallow objects with bogus IDs
|
# disallow objects with bogus IDs
|
||||||
def handle_incoming(%{"id" => nil}, _options), do: :error
|
defp handle_incoming_normalized(%{"id" => nil}, _options), do: :error
|
||||||
def handle_incoming(%{"id" => ""}, _options), do: :error
|
defp handle_incoming_normalized(%{"id" => ""}, _options), do: :error
|
||||||
# length of https:// = 8, should validate better, but good enough for now.
|
# length of https:// = 8, should validate better, but good enough for now.
|
||||||
def handle_incoming(%{"id" => id}, _options) when is_binary(id) and byte_size(id) < 8,
|
defp handle_incoming_normalized(%{"id" => id}, _options)
|
||||||
do: :error
|
when is_binary(id) and byte_size(id) < 8,
|
||||||
|
do: :error
|
||||||
|
|
||||||
def handle_incoming(
|
defp handle_incoming_normalized(
|
||||||
%{"type" => "Listen", "object" => %{"type" => "Audio"} = object} = data,
|
%{"type" => "Listen", "object" => %{"type" => "Audio"} = object} = data,
|
||||||
options
|
options
|
||||||
) do
|
) do
|
||||||
actor = Containment.get_actor(data)
|
actor = Containment.get_actor(data)
|
||||||
|
|
||||||
data =
|
data =
|
||||||
|
@ -451,25 +491,25 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
||||||
"star" => "⭐"
|
"star" => "⭐"
|
||||||
}
|
}
|
||||||
|
|
||||||
@doc "Rewrite misskey likes into EmojiReacts"
|
# Rewrite misskey likes into EmojiReacts
|
||||||
def handle_incoming(
|
defp handle_incoming_normalized(
|
||||||
%{
|
%{
|
||||||
"type" => "Like",
|
"type" => "Like",
|
||||||
"_misskey_reaction" => reaction
|
"_misskey_reaction" => reaction
|
||||||
} = data,
|
} = data,
|
||||||
options
|
options
|
||||||
) do
|
) do
|
||||||
data
|
data
|
||||||
|> Map.put("type", "EmojiReact")
|
|> Map.put("type", "EmojiReact")
|
||||||
|> Map.put("content", @misskey_reactions[reaction] || reaction)
|
|> Map.put("content", @misskey_reactions[reaction] || reaction)
|
||||||
|> handle_incoming(options)
|
|> handle_incoming_normalized(options)
|
||||||
end
|
end
|
||||||
|
|
||||||
def handle_incoming(
|
defp handle_incoming_normalized(
|
||||||
%{"type" => "Create", "object" => %{"type" => objtype, "id" => obj_id}} = data,
|
%{"type" => "Create", "object" => %{"type" => objtype, "id" => obj_id}} = data,
|
||||||
options
|
options
|
||||||
)
|
)
|
||||||
when objtype in ~w{Question Answer ChatMessage Audio Video Event Article Note Page Image} do
|
when objtype in ~w{Question Answer ChatMessage Audio Video Event Article Note Page Image} do
|
||||||
fetch_options = Keyword.put(options, :depth, (options[:depth] || 0) + 1)
|
fetch_options = Keyword.put(options, :depth, (options[:depth] || 0) + 1)
|
||||||
|
|
||||||
object =
|
object =
|
||||||
|
@ -492,8 +532,8 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def handle_incoming(%{"type" => type} = data, _options)
|
defp handle_incoming_normalized(%{"type" => type} = data, _options)
|
||||||
when type in ~w{Like EmojiReact Announce Add Remove} do
|
when type in ~w{Like EmojiReact Announce Add Remove} do
|
||||||
with :ok <- ObjectValidator.fetch_actor_and_object(data),
|
with :ok <- ObjectValidator.fetch_actor_and_object(data),
|
||||||
{:ok, activity, _meta} <-
|
{:ok, activity, _meta} <-
|
||||||
Pipeline.common_pipeline(data, local: false) do
|
Pipeline.common_pipeline(data, local: false) do
|
||||||
|
@ -503,11 +543,14 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def handle_incoming(
|
defp handle_incoming_normalized(
|
||||||
%{"type" => type} = data,
|
%{"type" => type} = data,
|
||||||
_options
|
_options
|
||||||
)
|
)
|
||||||
when type in ~w{Update Block Follow Accept Reject} do
|
when type in ~w{Update Block Follow Accept Reject} do
|
||||||
|
fixed_obj = maybe_fix_object(data["object"])
|
||||||
|
data = if fixed_obj != nil, do: %{data | "object" => fixed_obj}, else: data
|
||||||
|
|
||||||
with {:ok, %User{}} <- ObjectValidator.fetch_actor(data),
|
with {:ok, %User{}} <- ObjectValidator.fetch_actor(data),
|
||||||
{:ok, activity, _} <-
|
{:ok, activity, _} <-
|
||||||
Pipeline.common_pipeline(data, local: false) do
|
Pipeline.common_pipeline(data, local: false) do
|
||||||
|
@ -515,10 +558,10 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def handle_incoming(
|
defp handle_incoming_normalized(
|
||||||
%{"type" => "Delete"} = data,
|
%{"type" => "Delete"} = data,
|
||||||
_options
|
_options
|
||||||
) do
|
) do
|
||||||
with {:ok, activity, _} <-
|
with {:ok, activity, _} <-
|
||||||
Pipeline.common_pipeline(data, local: false) do
|
Pipeline.common_pipeline(data, local: false) do
|
||||||
{:ok, activity}
|
{:ok, activity}
|
||||||
|
@ -541,15 +584,15 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def handle_incoming(
|
defp handle_incoming_normalized(
|
||||||
%{
|
%{
|
||||||
"type" => "Undo",
|
"type" => "Undo",
|
||||||
"object" => %{"type" => "Follow", "object" => followed},
|
"object" => %{"type" => "Follow", "object" => followed},
|
||||||
"actor" => follower,
|
"actor" => follower,
|
||||||
"id" => id
|
"id" => id
|
||||||
} = _data,
|
} = _data,
|
||||||
_options
|
_options
|
||||||
) do
|
) do
|
||||||
with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
|
with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
|
||||||
{:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower),
|
{:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower),
|
||||||
{:ok, activity} <- ActivityPub.unfollow(follower, followed, id, false) do
|
{:ok, activity} <- ActivityPub.unfollow(follower, followed, id, false) do
|
||||||
|
@ -560,46 +603,46 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def handle_incoming(
|
defp handle_incoming_normalized(
|
||||||
%{
|
%{
|
||||||
"type" => "Undo",
|
"type" => "Undo",
|
||||||
"object" => %{"type" => type}
|
"object" => %{"type" => type}
|
||||||
} = data,
|
} = data,
|
||||||
_options
|
_options
|
||||||
)
|
)
|
||||||
when type in ["Like", "EmojiReact", "Announce", "Block"] do
|
when type in ["Like", "EmojiReact", "Announce", "Block"] do
|
||||||
with {:ok, activity, _} <- Pipeline.common_pipeline(data, local: false) do
|
with {:ok, activity, _} <- Pipeline.common_pipeline(data, local: false) do
|
||||||
{:ok, activity}
|
{:ok, activity}
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
# For Undos that don't have the complete object attached, try to find it in our database.
|
# For Undos that don't have the complete object attached, try to find it in our database.
|
||||||
def handle_incoming(
|
defp handle_incoming_normalized(
|
||||||
%{
|
%{
|
||||||
"type" => "Undo",
|
"type" => "Undo",
|
||||||
"object" => object
|
"object" => object
|
||||||
} = activity,
|
} = activity,
|
||||||
options
|
options
|
||||||
)
|
)
|
||||||
when is_binary(object) do
|
when is_binary(object) do
|
||||||
with %Activity{data: data} <- Activity.get_by_ap_id(object) do
|
with %Activity{data: data} <- Activity.get_by_ap_id(object) do
|
||||||
activity
|
activity
|
||||||
|> Map.put("object", data)
|
|> Map.put("object", data)
|
||||||
|> handle_incoming(options)
|
|> handle_incoming_normalized(options)
|
||||||
else
|
else
|
||||||
_e -> :error
|
_e -> :error
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def handle_incoming(
|
defp handle_incoming_normalized(
|
||||||
%{
|
%{
|
||||||
"type" => "Move",
|
"type" => "Move",
|
||||||
"actor" => origin_actor,
|
"actor" => origin_actor,
|
||||||
"object" => origin_actor,
|
"object" => origin_actor,
|
||||||
"target" => target_actor
|
"target" => target_actor
|
||||||
},
|
},
|
||||||
_options
|
_options
|
||||||
) do
|
) do
|
||||||
with %User{} = origin_user <- User.get_cached_by_ap_id(origin_actor),
|
with %User{} = origin_user <- User.get_cached_by_ap_id(origin_actor),
|
||||||
{:ok, %User{} = target_user} <- User.get_or_fetch_by_ap_id(target_actor),
|
{:ok, %User{} = target_user} <- User.get_or_fetch_by_ap_id(target_actor),
|
||||||
true <- origin_actor in target_user.also_known_as do
|
true <- origin_actor in target_user.also_known_as do
|
||||||
|
@ -609,7 +652,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def handle_incoming(_, _), do: :error
|
defp handle_incoming_normalized(_, _), do: :error
|
||||||
|
|
||||||
@spec get_obj_helper(String.t(), Keyword.t()) :: {:ok, Object.t()} | nil
|
@spec get_obj_helper(String.t(), Keyword.t()) :: {:ok, Object.t()} | nil
|
||||||
def get_obj_helper(id, options \\ []) do
|
def get_obj_helper(id, options \\ []) do
|
||||||
|
|
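The spoofing fixes hinge on fix_recursive/2: internal fields must be stripped not just at the top level but inside "object" and every entry of "formerRepresentations", since Mastodon edits can inline history (and "likes") there. An illustration only, using a hypothetical spoofed payload and a simplified stand-in for the real strip_internal_fields/1 pass:

    # Hypothetical payload: internal fields hidden at several depths.
    doc = %{
      "type" => "Update",
      "pleroma_internal" => %{"spoofed" => true},
      "object" => %{
        "type" => "Note",
        "pleroma_internal" => %{"spoofed" => true},
        "formerRepresentations" => %{
          "orderedItems" => [%{"type" => "Note", "pleroma_internal" => %{"spoofed" => true}}]
        }
      }
    }

    defmodule StripSketch do
      # Strip at the current level, then descend into edit history and the
      # wrapped object; bare AP ID strings are passed through untouched.
      def strip(%{} = obj) do
        obj
        |> Map.delete("pleroma_internal")
        |> strip_history()
        |> then(fn
          %{"object" => %{} = inner} = doc -> %{doc | "object" => strip(inner)}
          doc -> doc
        end)
      end

      def strip(other), do: other

      defp strip_history(%{"formerRepresentations" => %{"orderedItems" => items} = fr} = obj)
           when is_list(items) do
        %{obj | "formerRepresentations" => %{fr | "orderedItems" => Enum.map(items, &strip/1)}}
      end

      defp strip_history(obj), do: obj
    end

    clean = StripSketch.strip(doc)
    # No nesting level retains the internal key:
    # clean["object"]["pleroma_internal"] == nil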
lib/pleroma/web/plugs/ap_client_api_enabled_plug.ex (new file, 34 lines)

# Pleroma: A lightweight social networking server
# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.Plugs.APClientApiEnabledPlug do
  import Plug.Conn
  import Phoenix.Controller, only: [text: 2]

  @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
  @enabled_path [:activitypub, :client_api_enabled]

  def init(options \\ []), do: Map.new(options)

  def call(conn, %{allow_server: true}) do
    if @config_impl.get(@enabled_path, false) do
      conn
    else
      conn
      |> assign(:user, nil)
      |> assign(:token, nil)
    end
  end

  def call(conn, _) do
    if @config_impl.get(@enabled_path, false) do
      conn
    else
      conn
      |> put_status(:forbidden)
      |> text("C2S not enabled")
      |> halt()
    end
  end
end
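The plug's init/1 turns its keyword options into a map so call/2 can pattern-match the mode: without options it hard-fails disabled C2S requests, while allow_server: true lets the request continue as plain S2S by dropping the authentication assigns instead. In a router this looks like the following sketch, mirroring the pipelines added further below:

    # C2S-only pipeline: respond 403 when the client API is disabled.
    plug(Pleroma.Web.Plugs.APClientApiEnabledPlug)

    # Shared S2S/C2S pipeline: keep serving S2S, but strip C2S auth state.
    plug(Pleroma.Web.Plugs.APClientApiEnabledPlug, allow_server: true)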
@@ -19,8 +19,16 @@ defmodule Pleroma.Web.Plugs.HTTPSignaturePlug do
     options
   end

-  def call(%{assigns: %{valid_signature: true}} = conn, _opts) do
-    conn
+  def call(%{assigns: %{valid_signature: true}} = conn, _opts), do: conn
+
+  # skip for C2S requests from authenticated users
+  def call(%{assigns: %{user: %Pleroma.User{}}} = conn, _opts) do
+    if get_format(conn) in ["json", "activity+json"] do
+      # ensure access token is provided for 2FA
+      Pleroma.Web.Plugs.EnsureAuthenticatedPlug.call(conn, %{})
+    else
+      conn
+    end
   end

   def call(conn, _opts) do
@@ -907,22 +907,37 @@ defmodule Pleroma.Web.Router do
   # Client to Server (C2S) AP interactions
   pipeline :activitypub_client do
     plug(:ap_service_actor)
+    plug(Pleroma.Web.Plugs.APClientApiEnabledPlug)
     plug(:fetch_session)
     plug(:authenticate)
     plug(:after_auth)
   end

+  # AP interactions used by both S2S and C2S
+  pipeline :activitypub_server_or_client do
+    plug(:ap_service_actor)
+    plug(:fetch_session)
+    plug(:authenticate)
+    plug(Pleroma.Web.Plugs.APClientApiEnabledPlug, allow_server: true)
+    plug(:after_auth)
+    plug(:http_signature)
+  end
+
   scope "/", Pleroma.Web.ActivityPub do
     pipe_through([:activitypub_client])

     get("/api/ap/whoami", ActivityPubController, :whoami)
     get("/users/:nickname/inbox", ActivityPubController, :read_inbox)

-    get("/users/:nickname/outbox", ActivityPubController, :outbox)
     post("/users/:nickname/outbox", ActivityPubController, :update_outbox)
     post("/api/ap/upload_media", ActivityPubController, :upload_media)
+  end
+
+  scope "/", Pleroma.Web.ActivityPub do
+    pipe_through([:activitypub_server_or_client])
+
+    get("/users/:nickname/outbox", ActivityPubController, :outbox)
+
+    # The following two are S2S as well, see `ActivityPub.fetch_follow_information_for_user/1`:
     get("/users/:nickname/followers", ActivityPubController, :followers)
     get("/users/:nickname/following", ActivityPubController, :following)
     get("/users/:nickname/collections/featured", ActivityPubController, :pinned)
mix.exs (2 changed lines)

@@ -4,7 +4,7 @@ defmodule Pleroma.Mixfile do
   def project do
     [
       app: :pleroma,
-      version: version("2.8.0"),
+      version: version("2.9.0"),
       elixir: "~> 1.14",
       elixirc_paths: elixirc_paths(Mix.env()),
       compilers: Mix.compilers(),
@@ -4,6 +4,7 @@
 defmodule Pleroma.Emoji.PackTest do
   use Pleroma.DataCase
+  alias Pleroma.Emoji
   alias Pleroma.Emoji.Pack

   @emoji_path Path.join(

@@ -53,6 +54,63 @@ defmodule Pleroma.Emoji.PackTest do

     assert updated_pack.files_count == 5
   end

+  test "skips existing emojis when adding from zip file", %{pack: pack} do
+    # First, let's create a test pack with a "bear" emoji
+    test_pack_path = Path.join(@emoji_path, "test_bear_pack")
+    File.mkdir_p(test_pack_path)
+
+    # Create a pack.json file
+    File.write!(Path.join(test_pack_path, "pack.json"), """
+    {
+      "files": { "bear": "bear.png" },
+      "pack": {
+        "description": "Bear Pack", "homepage": "https://pleroma.social",
+        "license": "Test license", "share-files": true
+      }}
+    """)
+
+    # Copy a test image to use as the bear emoji
+    File.cp!(
+      Path.absname("test/instance_static/emoji/test_pack/blank.png"),
+      Path.join(test_pack_path, "bear.png")
+    )
+
+    # Load the pack to register the "bear" emoji in the global registry
+    {:ok, _bear_pack} = Pleroma.Emoji.Pack.load_pack("test_bear_pack")
+
+    # Reload emoji to make sure the bear emoji is in the global registry
+    Emoji.reload()
+
+    # Verify that the bear emoji exists in the global registry
+    assert Emoji.exist?("bear")
+
+    # Now try to add a zip file that contains an emoji with the same shortcode
+    file = %Plug.Upload{
+      content_type: "application/zip",
+      filename: "emojis.zip",
+      path: Path.absname("test/fixtures/emojis.zip")
+    }
+
+    {:ok, updated_pack} = Pack.add_file(pack, nil, nil, file)
+
+    # Verify that the "bear" emoji was skipped
+    refute Map.has_key?(updated_pack.files, "bear")
+
+    # Other emojis should be added
+    assert Map.has_key?(updated_pack.files, "a_trusted_friend-128")
+    assert Map.has_key?(updated_pack.files, "auroraborealis")
+    assert Map.has_key?(updated_pack.files, "baby_in_a_box")
+    assert Map.has_key?(updated_pack.files, "bear-128")
+
+    # Total count should be 4 (all emojis except "bear")
+    assert updated_pack.files_count == 4
+
+    # Clean up the test pack
+    on_exit(fn ->
+      File.rm_rf!(test_pack_path)
+    end)
+  end

   test "returns error when zip file is bad", %{pack: pack} do
test/pleroma/safe_zip_test.exs (new file, 499 lines; listing truncated)

defmodule Pleroma.SafeZipTest do
  # Not making this async because it creates and deletes files
  use ExUnit.Case

  alias Pleroma.SafeZip

  @fixtures_dir "test/fixtures"
  @tmp_dir "test/zip_tmp"

  setup do
    # Ensure tmp directory exists
    File.mkdir_p!(@tmp_dir)

    on_exit(fn ->
      # Clean up any files created during tests
      File.rm_rf!(@tmp_dir)
      File.mkdir_p!(@tmp_dir)
    end)

    :ok
  end

  describe "list_dir_file/1" do
    test "lists files in a valid zip" do
      {:ok, files} = SafeZip.list_dir_file(Path.join(@fixtures_dir, "emojis.zip"))
      assert is_list(files)
      assert length(files) > 0
    end

    test "returns an empty list for empty zip" do
      {:ok, files} = SafeZip.list_dir_file(Path.join(@fixtures_dir, "empty.zip"))
      assert files == []
    end

    test "returns error for non-existent file" do
      assert {:error, _} = SafeZip.list_dir_file(Path.join(@fixtures_dir, "nonexistent.zip"))
    end

    test "only lists regular files, not directories" do
      # Create a zip with both files and directories
      zip_path = create_zip_with_directory()

      # List files with SafeZip
      {:ok, files} = SafeZip.list_dir_file(zip_path)

      # Verify only regular files are listed, not directories
      assert "file_in_dir/test_file.txt" in files
      assert "root_file.txt" in files

      # Directory entries should not be included in the list
      refute "file_in_dir/" in files
    end
  end

  describe "contains_all_data?/2" do
    test "returns true when all files are in the archive" do
      # For this test, we'll create our own zip file with known content
      # to ensure we can test the contains_all_data? function properly
      zip_path = create_zip_with_directory()
      archive_data = File.read!(zip_path)

      # Check if the archive contains the root file
      # Note: The function expects charlists (Erlang strings) in the MapSet
      assert SafeZip.contains_all_data?(archive_data, MapSet.new([~c"root_file.txt"]))
    end

    test "returns false when files are missing" do
      archive_path = Path.join(@fixtures_dir, "emojis.zip")
      archive_data = File.read!(archive_path)

      # Create a MapSet with non-existent files
      fset = MapSet.new([~c"nonexistent.txt"])

      refute SafeZip.contains_all_data?(archive_data, fset)
    end

    test "returns false for invalid archive data" do
      refute SafeZip.contains_all_data?("invalid data", MapSet.new([~c"file.txt"]))
    end

    test "only checks for regular files, not directories" do
      # Create a zip with both files and directories
      zip_path = create_zip_with_directory()
      archive_data = File.read!(zip_path)

      # Check if the archive contains a directory (should return false)
      refute SafeZip.contains_all_data?(archive_data, MapSet.new([~c"file_in_dir/"]))

      # For this test, we'll manually check if the file exists in the archive
      # by extracting it and verifying it exists
      extract_dir = Path.join(@tmp_dir, "extract_check")
      File.mkdir_p!(extract_dir)
      {:ok, files} = SafeZip.unzip_file(zip_path, extract_dir)

      # Verify the root file was extracted
      assert Enum.any?(files, fn file ->
               Path.basename(file) == "root_file.txt"
             end)

      # Verify the file exists on disk
      assert File.exists?(Path.join(extract_dir, "root_file.txt"))
    end
  end

  describe "zip/4" do
    test "creates a zip file on disk" do
      # Create a test file
      test_file_path = Path.join(@tmp_dir, "test_file.txt")
      File.write!(test_file_path, "test content")

      # Create a zip file
      zip_path = Path.join(@tmp_dir, "test.zip")
      assert {:ok, ^zip_path} = SafeZip.zip(zip_path, ["test_file.txt"], @tmp_dir, false)

      # Verify the zip file exists
      assert File.exists?(zip_path)
    end

    test "creates a zip file in memory" do
      # Create a test file
      test_file_path = Path.join(@tmp_dir, "test_file.txt")
      File.write!(test_file_path, "test content")

      # Create a zip file in memory
      zip_name = Path.join(@tmp_dir, "test.zip")

      assert {:ok, {^zip_name, zip_data}} =
               SafeZip.zip(zip_name, ["test_file.txt"], @tmp_dir, true)

      # Verify the zip data is binary
      assert is_binary(zip_data)
    end

    test "returns error for unsafe paths" do
      # Try to zip a file with path traversal
      assert {:error, _} =
               SafeZip.zip(
                 Path.join(@tmp_dir, "test.zip"),
                 ["../fixtures/test.txt"],
                 @tmp_dir,
                 false
               )
    end

    test "can create zip with directories" do
      # Create a directory structure
      dir_path = Path.join(@tmp_dir, "test_dir")
      File.mkdir_p!(dir_path)

      file_in_dir_path = Path.join(dir_path, "file_in_dir.txt")
      File.write!(file_in_dir_path, "file in directory")

      # Create a zip file
      zip_path = Path.join(@tmp_dir, "dir_test.zip")

      assert {:ok, ^zip_path} =
               SafeZip.zip(
                 zip_path,
                 ["test_dir/file_in_dir.txt"],
                 @tmp_dir,
                 false
               )

      # Verify the zip file exists
      assert File.exists?(zip_path)

      # Extract and verify the directory structure is preserved
      extract_dir = Path.join(@tmp_dir, "extract")
      {:ok, files} = SafeZip.unzip_file(zip_path, extract_dir)

      # Check if the file path is in the list, accounting for possible full paths
      assert Enum.any?(files, fn file ->
               String.ends_with?(file, "file_in_dir.txt")
             end)

      # Verify the file exists in the expected location
      assert File.exists?(Path.join([extract_dir, "test_dir", "file_in_dir.txt"]))
    end
  end

  describe "unzip_file/3" do
    @tag :skip
    test "extracts files from a zip archive" do
      archive_path = Path.join(@fixtures_dir, "emojis.zip")

      # Extract the archive
      assert {:ok, files} = SafeZip.unzip_file(archive_path, @tmp_dir)

      # Verify files were extracted
      assert is_list(files)
      assert length(files) > 0

      # Verify at least one file exists
      first_file = List.first(files)

      # Simply check that the file exists in the tmp directory
      assert File.exists?(Path.join(@tmp_dir, Path.basename(first_file)))
    end

    test "extracts specific files from a zip archive" do
      archive_path = Path.join(@fixtures_dir, "emojis.zip")

      # Get list of files in the archive
      {:ok, all_files} = SafeZip.list_dir_file(archive_path)
      file_to_extract = List.first(all_files)

      # Extract only one file
      assert {:ok, [extracted_file]} =
               SafeZip.unzip_file(archive_path, @tmp_dir, [file_to_extract])

      # Verify only the specified file was extracted
      assert Path.basename(extracted_file) == Path.basename(file_to_extract)

      # Check that the file exists in the tmp directory
      assert File.exists?(Path.join(@tmp_dir, Path.basename(file_to_extract)))
    end

    test "returns error for invalid zip file" do
      invalid_path = Path.join(@tmp_dir, "invalid.zip")
      File.write!(invalid_path, "not a zip file")

      assert {:error, _} = SafeZip.unzip_file(invalid_path, @tmp_dir)
    end

    test "creates directories when extracting files in subdirectories" do
      # Create a zip with files in subdirectories
      zip_path = create_zip_with_directory()

      # Extract the archive
      assert {:ok, files} = SafeZip.unzip_file(zip_path, @tmp_dir)

      # Verify files were extracted - handle both relative and absolute paths
      assert Enum.any?(files, fn file ->
               Path.basename(file) == "test_file.txt" &&
                 String.contains?(file, "file_in_dir")
             end)

      assert Enum.any?(files, fn file ->
               Path.basename(file) == "root_file.txt"
             end)

      # Verify directory was created
      dir_path = Path.join(@tmp_dir, "file_in_dir")
      assert File.exists?(dir_path)
      assert File.dir?(dir_path)

      # Verify file in directory was extracted
      file_path = Path.join(dir_path, "test_file.txt")
      assert File.exists?(file_path)
    end
  end

  describe "unzip_data/3" do
    @tag :skip
    test "extracts files from zip data" do
      archive_path = Path.join(@fixtures_dir, "emojis.zip")
      archive_data = File.read!(archive_path)

      # Extract the archive from data
      assert {:ok, files} = SafeZip.unzip_data(archive_data, @tmp_dir)

      # Verify files were extracted
      assert is_list(files)
      assert length(files) > 0

      # Verify at least one file exists
      first_file = List.first(files)

      # Simply check that the file exists in the tmp directory
      assert File.exists?(Path.join(@tmp_dir, Path.basename(first_file)))
    end

    @tag :skip
    test "extracts specific files from zip data" do
      archive_path = Path.join(@fixtures_dir, "emojis.zip")
      archive_data = File.read!(archive_path)

      # Get list of files in the archive
      {:ok, all_files} = SafeZip.list_dir_file(archive_path)
      file_to_extract = List.first(all_files)

      # Extract only one file
      assert {:ok, extracted_files} =
               SafeZip.unzip_data(archive_data, @tmp_dir, [file_to_extract])

      # Verify only the specified file was extracted
      assert Enum.any?(extracted_files, fn path ->
               Path.basename(path) == Path.basename(file_to_extract)
             end)

      # Simply check that the file exists in the tmp directory
      assert File.exists?(Path.join(@tmp_dir, Path.basename(file_to_extract)))
    end

    test "returns error for invalid zip data" do
      assert {:error, _} = SafeZip.unzip_data("not a zip file", @tmp_dir)
    end

    test "creates directories when extracting files in subdirectories from data" do
      # Create a zip with files in subdirectories
      zip_path = create_zip_with_directory()
      archive_data = File.read!(zip_path)

      # Extract the archive from data
      assert {:ok, files} = SafeZip.unzip_data(archive_data, @tmp_dir)

      # Verify files were extracted - handle both relative and absolute paths
      assert Enum.any?(files, fn file ->
               Path.basename(file) == "test_file.txt" &&
                 String.contains?(file, "file_in_dir")
             end)

      assert Enum.any?(files, fn file ->
               Path.basename(file) == "root_file.txt"
             end)

      # Verify directory was created
      dir_path = Path.join(@tmp_dir, "file_in_dir")
|
||||||
|
assert File.exists?(dir_path)
|
||||||
|
assert File.dir?(dir_path)
|
||||||
|
|
||||||
|
# Verify file in directory was extracted
|
||||||
|
file_path = Path.join(dir_path, "test_file.txt")
|
||||||
|
assert File.exists?(file_path)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Security tests
|
||||||
|
describe "security checks" do
|
||||||
|
test "prevents path traversal in zip extraction" do
|
||||||
|
# Create a malicious zip file with path traversal
|
||||||
|
malicious_zip_path = create_malicious_zip_with_path_traversal()
|
||||||
|
|
||||||
|
# Try to extract it with SafeZip
|
||||||
|
assert {:error, _} = SafeZip.unzip_file(malicious_zip_path, @tmp_dir)
|
||||||
|
|
||||||
|
# Verify the file was not extracted outside the target directory
|
||||||
|
refute File.exists?(Path.join(Path.dirname(@tmp_dir), "traversal_attempt.txt"))
|
||||||
|
end
|
||||||
|
|
||||||
|
test "prevents directory traversal in zip listing" do
|
||||||
|
# Create a malicious zip file with path traversal
|
||||||
|
malicious_zip_path = create_malicious_zip_with_path_traversal()
|
||||||
|
|
||||||
|
# Try to list files with SafeZip
|
||||||
|
assert {:error, _} = SafeZip.list_dir_file(malicious_zip_path)
|
||||||
|
end
|
||||||
|
|
||||||
|
test "prevents path traversal in zip data extraction" do
|
||||||
|
# Create a malicious zip file with path traversal
|
||||||
|
malicious_zip_path = create_malicious_zip_with_path_traversal()
|
||||||
|
malicious_data = File.read!(malicious_zip_path)
|
||||||
|
|
||||||
|
# Try to extract it with SafeZip
|
||||||
|
assert {:error, _} = SafeZip.unzip_data(malicious_data, @tmp_dir)
|
||||||
|
|
||||||
|
# Verify the file was not extracted outside the target directory
|
||||||
|
refute File.exists?(Path.join(Path.dirname(@tmp_dir), "traversal_attempt.txt"))
|
||||||
|
end
|
||||||
|
|
||||||
|
test "handles zip bomb attempts" do
|
||||||
|
# Create a zip bomb (a zip with many files or large files)
|
||||||
|
zip_bomb_path = create_zip_bomb()
|
||||||
|
|
||||||
|
# The SafeZip module should handle this gracefully
|
||||||
|
# Either by successfully extracting it (if it's not too large)
|
||||||
|
# or by returning an error (if it detects a potential zip bomb)
|
||||||
|
result = SafeZip.unzip_file(zip_bomb_path, @tmp_dir)
|
||||||
|
|
||||||
|
case result do
|
||||||
|
{:ok, _} ->
|
||||||
|
# If it successfully extracts, make sure it didn't fill up the disk
|
||||||
|
# This is a simple check to ensure the extraction was controlled
|
||||||
|
assert File.exists?(@tmp_dir)
|
||||||
|
|
||||||
|
{:error, _} ->
|
||||||
|
# If it returns an error, that's also acceptable
|
||||||
|
# The important thing is that it doesn't crash or hang
|
||||||
|
assert true
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
test "handles deeply nested directory structures" do
|
||||||
|
# Create a zip with deeply nested directories
|
||||||
|
deep_nest_path = create_deeply_nested_zip()
|
||||||
|
|
||||||
|
# The SafeZip module should handle this gracefully
|
||||||
|
result = SafeZip.unzip_file(deep_nest_path, @tmp_dir)
|
||||||
|
|
||||||
|
case result do
|
||||||
|
{:ok, files} ->
|
||||||
|
# If it successfully extracts, verify the files were extracted
|
||||||
|
assert is_list(files)
|
||||||
|
assert length(files) > 0
|
||||||
|
|
||||||
|
{:error, _} ->
|
||||||
|
# If it returns an error, that's also acceptable
|
||||||
|
# The important thing is that it doesn't crash or hang
|
||||||
|
assert true
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Helper functions to create test fixtures
|
||||||
|
|
||||||
|
# Creates a zip file with a path traversal attempt
|
||||||
|
defp create_malicious_zip_with_path_traversal do
|
||||||
|
malicious_zip_path = Path.join(@tmp_dir, "path_traversal.zip")
|
||||||
|
|
||||||
|
# Create a file to include in the zip
|
||||||
|
test_file_path = Path.join(@tmp_dir, "test_file.txt")
|
||||||
|
File.write!(test_file_path, "malicious content")
|
||||||
|
|
||||||
|
# Use Erlang's zip module directly to create a zip with path traversal
|
||||||
|
{:ok, charlist_path} =
|
||||||
|
:zip.create(
|
||||||
|
String.to_charlist(malicious_zip_path),
|
||||||
|
[{String.to_charlist("../traversal_attempt.txt"), File.read!(test_file_path)}]
|
||||||
|
)
|
||||||
|
|
||||||
|
to_string(charlist_path)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Creates a zip file with directory entries
|
||||||
|
defp create_zip_with_directory do
|
||||||
|
zip_path = Path.join(@tmp_dir, "with_directory.zip")
|
||||||
|
|
||||||
|
# Create files to include in the zip
|
||||||
|
root_file_path = Path.join(@tmp_dir, "root_file.txt")
|
||||||
|
File.write!(root_file_path, "root file content")
|
||||||
|
|
||||||
|
# Create a directory and a file in it
|
||||||
|
dir_path = Path.join(@tmp_dir, "file_in_dir")
|
||||||
|
File.mkdir_p!(dir_path)
|
||||||
|
|
||||||
|
file_in_dir_path = Path.join(dir_path, "test_file.txt")
|
||||||
|
File.write!(file_in_dir_path, "file in directory content")
|
||||||
|
|
||||||
|
# Use Erlang's zip module to create a zip with directory structure
|
||||||
|
{:ok, charlist_path} =
|
||||||
|
:zip.create(
|
||||||
|
String.to_charlist(zip_path),
|
||||||
|
[
|
||||||
|
{String.to_charlist("root_file.txt"), File.read!(root_file_path)},
|
||||||
|
{String.to_charlist("file_in_dir/test_file.txt"), File.read!(file_in_dir_path)}
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
to_string(charlist_path)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Creates a zip bomb (a zip with many small files)
|
||||||
|
defp create_zip_bomb do
|
||||||
|
zip_path = Path.join(@tmp_dir, "zip_bomb.zip")
|
||||||
|
|
||||||
|
# Create a small file to duplicate many times
|
||||||
|
small_file_path = Path.join(@tmp_dir, "small_file.txt")
|
||||||
|
File.write!(small_file_path, String.duplicate("A", 100))
|
||||||
|
|
||||||
|
# Create a list of many files to include in the zip
|
||||||
|
file_entries =
|
||||||
|
for i <- 1..100 do
|
||||||
|
{String.to_charlist("file_#{i}.txt"), File.read!(small_file_path)}
|
||||||
|
end
|
||||||
|
|
||||||
|
# Use Erlang's zip module to create a zip with many files
|
||||||
|
{:ok, charlist_path} =
|
||||||
|
:zip.create(
|
||||||
|
String.to_charlist(zip_path),
|
||||||
|
file_entries
|
||||||
|
)
|
||||||
|
|
||||||
|
to_string(charlist_path)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Creates a zip with deeply nested directories
|
||||||
|
defp create_deeply_nested_zip do
|
||||||
|
zip_path = Path.join(@tmp_dir, "deep_nest.zip")
|
||||||
|
|
||||||
|
# Create a file to include in the zip
|
||||||
|
file_content = "test content"
|
||||||
|
|
||||||
|
# Create a list of deeply nested files
|
||||||
|
file_entries =
|
||||||
|
for i <- 1..10 do
|
||||||
|
nested_path = Enum.reduce(1..i, "nested", fn j, acc -> "#{acc}/level_#{j}" end)
|
||||||
|
{String.to_charlist("#{nested_path}/file.txt"), file_content}
|
||||||
|
end
|
||||||
|
|
||||||
|
# Use Erlang's zip module to create a zip with deeply nested directories
|
||||||
|
{:ok, charlist_path} =
|
||||||
|
:zip.create(
|
||||||
|
String.to_charlist(zip_path),
|
||||||
|
file_entries
|
||||||
|
)
|
||||||
|
|
||||||
|
to_string(charlist_path)
|
||||||
|
end
|
||||||
|
end
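For reference, the suite above exercises the whole SafeZip surface; a minimal usage sketch follows, with invented paths, and with the fourth argument to SafeZip.zip/4 passed as false exactly as the tests do:

    # Listing rejects archives with unsafe (path-traversal) entry names
    {:ok, entries} = SafeZip.list_dir_file("/tmp/backup.zip")

    # Extract every entry, or only a selected subset, into a target directory
    {:ok, _files} = SafeZip.unzip_file("/tmp/backup.zip", "/tmp/extracted")
    {:ok, _files} = SafeZip.unzip_file("/tmp/backup.zip", "/tmp/extracted", [hd(entries)])

    # The same operations are available for archives already held in memory
    {:ok, _files} = SafeZip.unzip_data(File.read!("/tmp/backup.zip"), "/tmp/extracted")

    # Create an archive from paths relative to a working directory
    {:ok, _zip_path} = SafeZip.zip("/tmp/out.zip", ["dir/file.txt"], "/tmp/work", false)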
@@ -1344,6 +1344,11 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
  end

  describe "GET /users/:nickname/outbox" do
    setup do
      Mox.stub_with(Pleroma.StaticStubbedConfigMock, Pleroma.Config)
      :ok
    end

    test "it paginates correctly", %{conn: conn} do
      user = insert(:user)
      conn = assign(conn, :user, user)
@@ -1432,6 +1437,22 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
      assert %{"orderedItems" => []} = resp
    end

    test "it does not return a local note activity when C2S API is disabled", %{conn: conn} do
      clear_config([:activitypub, :client_api_enabled], false)
      user = insert(:user)
      reader = insert(:user)
      {:ok, _note_activity} = CommonAPI.post(user, %{status: "mew mew", visibility: "local"})

      resp =
        conn
        |> assign(:user, reader)
        |> put_req_header("accept", "application/activity+json")
        |> get("/users/#{user.nickname}/outbox?page=true")
        |> json_response(200)

      assert %{"orderedItems" => []} = resp
    end

    test "it returns a note activity in a collection", %{conn: conn} do
      note_activity = insert(:note_activity)
      note_object = Object.normalize(note_activity, fetch: false)
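The [:activitypub, :client_api_enabled] key toggled per-test above can also be set instance-wide; a minimal sketch, assuming the rest of the :activitypub group keeps its defaults:

    config :pleroma, :activitypub, client_api_enabled: false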
|
||||||
|
@ -1483,6 +1504,35 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
|
||||||
assert [answer_outbox] = outbox_get["orderedItems"]
|
assert [answer_outbox] = outbox_get["orderedItems"]
|
||||||
assert answer_outbox["id"] == activity.data["id"]
|
assert answer_outbox["id"] == activity.data["id"]
|
||||||
end
|
end
|
||||||
|
|
||||||
|
test "it works with authorized fetch forced when authenticated" do
|
||||||
|
clear_config([:activitypub, :authorized_fetch_mode], true)
|
||||||
|
|
||||||
|
user = insert(:user)
|
||||||
|
outbox_endpoint = user.ap_id <> "/outbox"
|
||||||
|
|
||||||
|
conn =
|
||||||
|
build_conn()
|
||||||
|
|> assign(:user, user)
|
||||||
|
|> put_req_header("accept", "application/activity+json")
|
||||||
|
|> get(outbox_endpoint)
|
||||||
|
|
||||||
|
assert json_response(conn, 200)
|
||||||
|
end
|
||||||
|
|
||||||
|
test "it fails with authorized fetch forced when unauthenticated", %{conn: conn} do
|
||||||
|
clear_config([:activitypub, :authorized_fetch_mode], true)
|
||||||
|
|
||||||
|
user = insert(:user)
|
||||||
|
outbox_endpoint = user.ap_id <> "/outbox"
|
||||||
|
|
||||||
|
conn =
|
||||||
|
conn
|
||||||
|
|> put_req_header("accept", "application/activity+json")
|
||||||
|
|> get(outbox_endpoint)
|
||||||
|
|
||||||
|
assert response(conn, 401)
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
describe "POST /users/:nickname/outbox (C2S)" do
|
describe "POST /users/:nickname/outbox (C2S)" do
|
||||||
|
@ -2153,6 +2203,30 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
|
||||||
|> post("/api/ap/upload_media", %{"file" => image, "description" => desc})
|
|> post("/api/ap/upload_media", %{"file" => image, "description" => desc})
|
||||||
|> json_response(403)
|
|> json_response(403)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
test "they don't work when C2S API is disabled", %{conn: conn} do
|
||||||
|
clear_config([:activitypub, :client_api_enabled], false)
|
||||||
|
|
||||||
|
user = insert(:user)
|
||||||
|
|
||||||
|
assert conn
|
||||||
|
|> assign(:user, user)
|
||||||
|
|> get("/api/ap/whoami")
|
||||||
|
|> response(403)
|
||||||
|
|
||||||
|
desc = "Description of the image"
|
||||||
|
|
||||||
|
image = %Plug.Upload{
|
||||||
|
content_type: "image/jpeg",
|
||||||
|
path: Path.absname("test/fixtures/image.jpg"),
|
||||||
|
filename: "an_image.jpg"
|
||||||
|
}
|
||||||
|
|
||||||
|
assert conn
|
||||||
|
|> assign(:user, user)
|
||||||
|
|> post("/api/ap/upload_media", %{"file" => image, "description" => desc})
|
||||||
|
|> response(403)
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
test "pinned collection", %{conn: conn} do
|
test "pinned collection", %{conn: conn} do
|
||||||
|
|
|
@@ -13,6 +13,23 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidatorTest do
  import Pleroma.Factory

  describe "attachments" do
    test "works with apng" do
      attachment =
        %{
          "mediaType" => "image/apng",
          "name" => "",
          "type" => "Document",
          "url" =>
            "https://media.misskeyusercontent.com/io/2859c26e-cd43-4550-848b-b6243bc3fe28.apng"
        }

      assert {:ok, attachment} =
               AttachmentValidator.cast_and_validate(attachment)
               |> Ecto.Changeset.apply_action(:insert)

      assert attachment.mediaType == "image/apng"
    end

    test "fails without url" do
      attachment = %{
        "mediaType" => "",
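A minimal sketch of running an arbitrary attachment map through the same validator pipeline as the apng test above, with a hypothetical attrs map and URL:

    attrs = %{
      "mediaType" => "image/apng",
      "name" => "",
      "type" => "Document",
      "url" => "https://files.example.com/animated.apng"
    }

    {:ok, attachment} =
      attrs
      |> AttachmentValidator.cast_and_validate()
      |> Ecto.Changeset.apply_action(:insert)

    attachment.mediaType
    #=> "image/apng"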
@@ -156,6 +156,246 @@ defmodule Pleroma.Web.ActivityPub.TransmogrifierTest do
      # It fetched the quoted post
      assert Object.normalize("https://misskey.io/notes/8vs6wxufd0")
    end

    test "doesn't allow remote edits to fake local likes" do
      # as a spot check for no internal fields getting injected
      now = DateTime.utc_now()
      pub_date = DateTime.to_iso8601(Timex.subtract(now, Timex.Duration.from_minutes(3)))
      edit_date = DateTime.to_iso8601(now)

      local_user = insert(:user)

      create_data = %{
        "type" => "Create",
        "id" => "http://mastodon.example.org/users/admin/statuses/2619539638/activity",
        "actor" => "http://mastodon.example.org/users/admin",
        "to" => ["https://www.w3.org/ns/activitystreams#Public"],
        "cc" => [],
        "object" => %{
          "type" => "Note",
          "id" => "http://mastodon.example.org/users/admin/statuses/2619539638",
          "attributedTo" => "http://mastodon.example.org/users/admin",
          "to" => ["https://www.w3.org/ns/activitystreams#Public"],
          "cc" => [],
          "published" => pub_date,
          "content" => "miaow",
          "likes" => [local_user.ap_id]
        }
      }

      update_data =
        create_data
        |> Map.put("type", "Update")
        |> Map.put("id", create_data["object"]["id"] <> "/update/1")
        |> put_in(["object", "content"], "miaow :3")
        |> put_in(["object", "updated"], edit_date)
        |> put_in(["object", "formerRepresentations"], %{
          "type" => "OrderedCollection",
          "totalItems" => 1,
          "orderedItems" => [create_data["object"]]
        })

      {:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(create_data)
      %Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"])
      assert object.data["content"] == "miaow"
      assert object.data["likes"] == []
      assert object.data["like_count"] == 0

      {:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(update_data)
      %Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"]["id"])
      assert object.data["content"] == "miaow :3"
      assert object.data["likes"] == []
      assert object.data["like_count"] == 0
    end
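
    # For contrast: the spoof above relies on "likes" arriving as an inlined
    # list of actor IDs, whereas well-formed remote objects reference a likes
    # Collection, as the "doesn't trip over remote likes" tests below exercise.
    # Hypothetical shapes side by side:
    #
    #   "likes" => ["https://this.instance/users/someone"]       # spoofed inline list
    #   "likes" => %{"type" => "Collection", "totalItems" => 0}  # Collection reference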

    test "strips internal fields from history items in edited notes" do
      now = DateTime.utc_now()
      pub_date = DateTime.to_iso8601(Timex.subtract(now, Timex.Duration.from_minutes(3)))
      edit_date = DateTime.to_iso8601(now)

      local_user = insert(:user)

      create_data = %{
        "type" => "Create",
        "id" => "http://mastodon.example.org/users/admin/statuses/2619539638/activity",
        "actor" => "http://mastodon.example.org/users/admin",
        "to" => ["https://www.w3.org/ns/activitystreams#Public"],
        "cc" => [],
        "object" => %{
          "type" => "Note",
          "id" => "http://mastodon.example.org/users/admin/statuses/2619539638",
          "attributedTo" => "http://mastodon.example.org/users/admin",
          "to" => ["https://www.w3.org/ns/activitystreams#Public"],
          "cc" => [],
          "published" => pub_date,
          "content" => "miaow",
          "likes" => [],
          "like_count" => 0
        }
      }

      update_data =
        create_data
        |> Map.put("type", "Update")
        |> Map.put("id", create_data["object"]["id"] <> "/update/1")
        |> put_in(["object", "content"], "miaow :3")
        |> put_in(["object", "updated"], edit_date)
        |> put_in(["object", "formerRepresentations"], %{
          "type" => "OrderedCollection",
          "totalItems" => 1,
          "orderedItems" => [
            Map.merge(create_data["object"], %{
              "likes" => [local_user.ap_id],
              "like_count" => 1,
              "pleroma" => %{"internal_field" => "should_be_stripped"}
            })
          ]
        })

      {:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(create_data)
      %Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"])
      assert object.data["content"] == "miaow"
      assert object.data["likes"] == []
      assert object.data["like_count"] == 0

      {:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(update_data)
      %Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"]["id"])
      assert object.data["content"] == "miaow :3"
      assert object.data["likes"] == []
      assert object.data["like_count"] == 0

      # Check that internal fields are stripped from history items
      history_item = List.first(object.data["formerRepresentations"]["orderedItems"])
      assert history_item["likes"] == []
      assert history_item["like_count"] == 0
      refute Map.has_key?(history_item, "pleroma")
    end

    test "doesn't trip over remote likes in notes" do
      now = DateTime.utc_now()
      pub_date = DateTime.to_iso8601(Timex.subtract(now, Timex.Duration.from_minutes(3)))
      edit_date = DateTime.to_iso8601(now)

      create_data = %{
        "type" => "Create",
        "id" => "http://mastodon.example.org/users/admin/statuses/3409297097/activity",
        "actor" => "http://mastodon.example.org/users/admin",
        "to" => ["https://www.w3.org/ns/activitystreams#Public"],
        "cc" => [],
        "object" => %{
          "type" => "Note",
          "id" => "http://mastodon.example.org/users/admin/statuses/3409297097",
          "attributedTo" => "http://mastodon.example.org/users/admin",
          "to" => ["https://www.w3.org/ns/activitystreams#Public"],
          "cc" => [],
          "published" => pub_date,
          "content" => "miaow",
          "likes" => %{
            "id" => "http://mastodon.example.org/users/admin/statuses/3409297097/likes",
            "totalItems" => 0,
            "type" => "Collection"
          }
        }
      }

      update_data =
        create_data
        |> Map.put("type", "Update")
        |> Map.put("id", create_data["object"]["id"] <> "/update/1")
        |> put_in(["object", "content"], "miaow :3")
        |> put_in(["object", "updated"], edit_date)
        |> put_in(["object", "likes", "totalItems"], 666)
        |> put_in(["object", "formerRepresentations"], %{
          "type" => "OrderedCollection",
          "totalItems" => 1,
          "orderedItems" => [create_data["object"]]
        })

      {:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(create_data)
      %Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"])
      assert object.data["content"] == "miaow"
      assert object.data["likes"] == []
      assert object.data["like_count"] == 0

      {:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(update_data)
      %Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"]["id"])
      assert object.data["content"] == "miaow :3"
      assert object.data["likes"] == []
      # in the future this should retain remote likes, but for now:
      assert object.data["like_count"] == 0
    end

    test "doesn't trip over remote likes in polls" do
      now = DateTime.utc_now()
      pub_date = DateTime.to_iso8601(Timex.subtract(now, Timex.Duration.from_minutes(3)))
      edit_date = DateTime.to_iso8601(now)

      create_data = %{
        "type" => "Create",
        "id" => "http://mastodon.example.org/users/admin/statuses/2471790073/activity",
        "actor" => "http://mastodon.example.org/users/admin",
        "to" => ["https://www.w3.org/ns/activitystreams#Public"],
        "cc" => [],
        "object" => %{
          "type" => "Question",
          "id" => "http://mastodon.example.org/users/admin/statuses/2471790073",
          "attributedTo" => "http://mastodon.example.org/users/admin",
          "to" => ["https://www.w3.org/ns/activitystreams#Public"],
          "cc" => [],
          "published" => pub_date,
          "content" => "vote!",
          "anyOf" => [
            %{
              "type" => "Note",
              "name" => "a",
              "replies" => %{
                "type" => "Collection",
                "totalItems" => 3
              }
            },
            %{
              "type" => "Note",
              "name" => "b",
              "replies" => %{
                "type" => "Collection",
                "totalItems" => 1
              }
            }
          ],
          "likes" => %{
            "id" => "http://mastodon.example.org/users/admin/statuses/2471790073/likes",
            "totalItems" => 0,
            "type" => "Collection"
          }
        }
      }

      update_data =
        create_data
        |> Map.put("type", "Update")
        |> Map.put("id", create_data["object"]["id"] <> "/update/1")
        |> put_in(["object", "content"], "vote now!")
        |> put_in(["object", "updated"], edit_date)
        |> put_in(["object", "likes", "totalItems"], 666)
        |> put_in(["object", "formerRepresentations"], %{
          "type" => "OrderedCollection",
          "totalItems" => 1,
          "orderedItems" => [create_data["object"]]
        })

      {:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(create_data)
      %Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"])
      assert object.data["content"] == "vote!"
      assert object.data["likes"] == []
      assert object.data["like_count"] == 0

      {:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(update_data)
      %Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"]["id"])
      assert object.data["content"] == "vote now!"
      assert object.data["likes"] == []
      # in the future this should retain remote likes, but for now:
      assert object.data["like_count"] == 0
    end
  end

  describe "prepare outgoing" do