Initial commit

silverpill 2021-04-09 00:22:17 +00:00
commit fdef4b6e6a
92 changed files with 10405 additions and 0 deletions

16  .editorconfig  Normal file

@@ -0,0 +1,16 @@
[*]
charset = utf-8
indent_style = space
insert_final_newline = true
trim_trailing_whitespace = true
[*.rs]
indent_size = 4
[*.yaml]
indent_size = 2
[*.md]
indent_size = 2
max_line_length = off
trim_trailing_whitespace = false

2  .env  Normal file

@@ -0,0 +1,2 @@
# Allowed values: development, production
ENVIRONMENT=development

4  .gitignore  vendored Normal file

@@ -0,0 +1,4 @@
.env.local
config.yaml
/files
/target

3883  Cargo.lock  generated Normal file

File diff suppressed because it is too large

77  Cargo.toml  Normal file

@@ -0,0 +1,77 @@
[package]
name = "mitra"
version = "0.1.0"
description = "Mitra backend"
license = "AGPL-3.0"
edition = "2018"
publish = false
default-run = "mitra"
[dependencies]
# Used to handle incoming HTTP requests
actix-cors = "0.5.4"
actix-files = "0.5.0"
actix-session = "0.4.1"
actix-web = "3.3.2"
# Used for managing async tasks
actix-rt = "1.1.1"
# Used for HTML sanitization
ammonia = "3.1.2"
# Used for working with RSA keys, HTTP signatures and file uploads
base64 = "0.13.0"
# Used for working with dates
chrono = { version = "0.4.19", features = ["serde"] }
# Used to build admin CLI tool
clap = { version = "3.0.0-beta.2", default-features = false, features = ["std", "derive"] }
# Used for pooling database connections
deadpool = "0.7.0"
deadpool-postgres = { version = "0.5.6", default-features = false }
# Used to read .env files
dotenv = "0.15.0"
# Used to work with hexadecimal strings
hex = "0.4.3"
# Used for logging
log = "0.4.14"
env_logger = { version = "0.8.4", default-features = false }
# Used to guess media type of a file
mime_guess = "2.0.3"
mime-sniffer = "0.1.2"
# Used to determine the number of CPUs on the system
num_cpus = "1.13.0"
# Used to map postgres types to rust types
postgres-types = { version = "0.1.2", features = ["derive", "with-chrono-0_4", "with-uuid-0_8", "with-serde_json-1"] }
# Used for working with regular expressions
regex = "1.5.4"
# Used to generate random numbers
rand = "0.8.3"
# Used for managing database migrations
refinery = { version = "0.4.0", features = ["tokio-postgres"] }
# Used for making async HTTP requests
reqwest = { version = "0.10.10", features = ["json"] }
# Used for working with RSA keys
rsa = "0.5.0"
# Used for hashing passwords
rust-argon2 = "0.8.3"
# Used for working with ethereum keys
secp256k1 = { version = "0.20.3", features = ["rand", "rand-std"] }
# Used for serialization/deserialization
# https://github.com/rust-db/refinery/issues/160
serde = { version = "=1.0.117", features = ["derive"] }
serde_json = "1.0"
# Used to parse config file
serde_yaml = "0.8.17"
# Used to calculate SHA2 hashes
sha2 = "0.9.5"
# Used for creating error types
thiserror = "1.0.24"
# Async runtime (required for #[tokio::main])
tokio = { version = "0.2.25", features = ["macros"] }
# Used for working with Postgresql database (compatible with tokio 0.2)
tokio-postgres = { version = "0.5.5", features = ["with-chrono-0_4", "with-uuid-0_8", "with-serde_json-1"] }
# Used to work with URLs
url = "2.2.2"
# Used to work with UUIDs
uuid = { version = "0.8.2", features = ["serde", "v4"] }
# Used to query ethereum node
web3 = { version = "0.15.0", default-features = false, features = ["http", "http-tls", "signing"] }
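
Taken together, these pins describe the server's shape: actix-web 3 serves HTTP on the tokio 0.2 runtime, deadpool/tokio-postgres handle database access, and reqwest performs outgoing federation requests. A minimal sketch of that wiring is shown below; the route, port, and response payload are placeholders, not the project's actual handlers.

```
// Minimal sketch only: actix-web 3 on the tokio 0.2 runtime, as the version
// pins above imply. The route and payload are illustrative placeholders.
use actix_web::{get, App, HttpResponse, HttpServer, Responder};

#[get("/api/v1/instance")]
async fn instance() -> impl Responder {
    // Placeholder payload; a real handler would read values from config.yaml.
    HttpResponse::Ok().json(serde_json::json!({ "title": "myserver" }))
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new().service(instance))
        .bind(("127.0.0.1", 8380))?
        .run()
        .await
}
```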

661  LICENSE  Normal file

@@ -0,0 +1,661 @@
GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU Affero General Public License is a free, copyleft license for
software and other kinds of works, specifically designed to ensure
cooperation with the community in the case of network server software.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
our General Public Licenses are intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.
A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate. Many developers of free software are heartened and
encouraged by the resulting cooperation. However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.
The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community. It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server. Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.
An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals. This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing under
this license.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU Affero General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Remote Network Interaction; Use with the GNU General Public License.
Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your version
supports such interaction) an opportunity to receive the Corresponding
Source of your version by providing access to the Corresponding Source
from a network server at no charge, through some standard or customary
means of facilitating copying of software. This Corresponding Source
shall include the Corresponding Source for any work covered by version 3
of the GNU General Public License that is incorporated pursuant to the
following paragraph.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU Affero General Public License from time to time. Such new versions
will be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU Affero General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU Affero General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU Affero General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU AGPL, see
<https://www.gnu.org/licenses/>.

114  README.md  Normal file

@@ -0,0 +1,114 @@
# Mitra
Federated social network with smart contracts.
- Built on the [ActivityPub](https://activitypub.rocks/) protocol.
- Lightweight.
- Sign-in with Ethereum.
- Converting posts into NFTs.
- More crypto features in the future.
**WIP: Mitra is not ready for production yet.**
Demo instance: https://test.mitra.pm/ (invite-only)
## Requirements
- Rust 1.51+
- PostgreSQL
- IPFS node (optional)
- Ethereum node (optional)
## Development
### Create database
```
docker-compose up
```
Test connection:
```
psql -h localhost -p 5432 -U mitra mitra
```
### Run web service
Create config file:
```
cp config.yaml.example config.yaml
```
Compile and run service:
```
cargo run
```
### Run CLI
```
cargo run --bin mitractl
```
### Build for production
```
cargo build --release
```
## API
### Mastodon API
Endpoints are similar to the Mastodon API:
```
GET /api/v1/accounts/{account_id}
PATCH /api/v1/accounts/update_credentials
GET /api/v1/accounts/relationships
POST /api/v1/accounts/{account_id}/follow
POST /api/v1/accounts/{account_id}/unfollow
GET /api/v1/directory
GET /api/v1/instance
POST /api/v1/media
GET /api/v2/search
POST /api/v1/statuses
GET /api/v1/statuses/{status_id}
GET /api/v1/timelines/home
```
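For example, instance metadata can be fetched without authentication (illustrative request; substitute your own hostname):
```
curl https://myserver.net/api/v1/instance
```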
Extra APIs:
```
POST /api/v1/statuses/{status_id}/make_permanent
GET /api/v1/statuses/{status_id}/signature
```
## CLI commands
Delete profile:
```
mitractl delete-profile -i 55a3005f-f293-4168-ab70-6ab09a879679
```
Generate invite code:
```
mitractl generate-invite-code
```
List generated invites:
```
mitractl list-invite-codes
```
Generate ethereum address:
```
mitractl generate-ethereum-address
```

29  config.yaml.example  Normal file

@@ -0,0 +1,29 @@
database_url: postgres://mitra:mitra@127.0.0.1:5432/mitra
http_host: '127.0.0.1'
http_port: 8380
# 32 characters or more
cookie_secret_key: null
# domain name
instance_uri: myserver.net
instance_title: myserver
instance_short_description: myserver is a federated social network
# Long description can contain markdown syntax
instance_description: myserver is a federated social network
registrations_open: false
# Login message must contain instance URL
login_message: 'Sign this message to log in to https://myserver.net. Do not sign this message on other sites!'
ethereum_json_rpc_url: 'http://127.0.0.1:8545'
# Block explorer base URL (must be compatible with https://eips.ethereum.org/EIPS/eip-3091)
ethereum_explorer_url: null
ethereum_contract:
  address: '0xe7f1725E7734CE288F8367e1Bb143E90bb3F0512'
  chain_id: 31337
  signing_key: null
ipfs_api_url: 'http://127.0.0.1:5001'
# IPFS gateway for clients
ipfs_gateway_url: 'https://ipfs.io'

433  contracts/Collectible.json  Normal file

File diff suppressed because one or more lines are too long

151  contracts/Minter.json  Normal file

File diff suppressed because one or more lines are too long

17  docker-compose.yaml  Normal file

@@ -0,0 +1,17 @@
version: "3"
services:
  database:
    image: postgres:latest
    restart: always
    environment:
      POSTGRES_PASSWORD: mitra
      POSTGRES_USER: mitra
      POSTGRES_DB: mitra
    ports:
      - "5432:5432"
    volumes:
      - mitra_postgres:/var/lib/postgresql/data
volumes:
  mitra_postgres:

@@ -0,0 +1,62 @@
CREATE TABLE actor_profile (
id UUID PRIMARY KEY,
username VARCHAR(100) NOT NULL,
display_name VARCHAR(100),
acct VARCHAR(200) UNIQUE NOT NULL,
bio TEXT,
bio_source TEXT,
avatar_file_name VARCHAR(100),
banner_file_name VARCHAR(100),
follower_count INTEGER NOT NULL CHECK (follower_count >= 0) DEFAULT 0,
following_count INTEGER NOT NULL CHECK (following_count >= 0) DEFAULT 0,
post_count INTEGER NOT NULL CHECK (post_count >= 0) DEFAULT 0,
actor_json JSONB,
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now()
);
CREATE TABLE user_invite_code (
code VARCHAR(100) PRIMARY KEY,
used BOOLEAN NOT NULL DEFAULT FALSE
);
CREATE TABLE user_account (
id UUID PRIMARY KEY REFERENCES actor_profile (id) ON DELETE CASCADE,
wallet_address VARCHAR(100) UNIQUE NOT NULL,
password_hash VARCHAR(200) NOT NULL,
private_key TEXT NOT NULL,
invite_code VARCHAR(100) UNIQUE REFERENCES user_invite_code (code) ON DELETE SET NULL,
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now()
);
CREATE TABLE post (
id UUID PRIMARY KEY,
author_id UUID NOT NULL REFERENCES actor_profile (id) ON DELETE CASCADE,
content TEXT NOT NULL,
ipfs_cid VARCHAR(200),
token_id INTEGER,
token_tx_id VARCHAR(200),
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now()
);
CREATE TABLE relationship (
source_id UUID NOT NULL REFERENCES actor_profile (id) ON DELETE CASCADE,
target_id UUID NOT NULL REFERENCES actor_profile (id) ON DELETE CASCADE,
PRIMARY KEY (source_id, target_id)
);
CREATE TABLE follow_request (
id UUID PRIMARY KEY,
source_id UUID NOT NULL REFERENCES actor_profile (id) ON DELETE CASCADE,
target_id UUID NOT NULL REFERENCES actor_profile (id) ON DELETE CASCADE,
request_status SMALLINT NOT NULL,
UNIQUE (source_id, target_id)
);
CREATE TABLE media_attachment (
id UUID PRIMARY KEY,
owner_id UUID NOT NULL REFERENCES actor_profile (id) ON DELETE CASCADE,
media_type VARCHAR(50),
file_name VARCHAR(200) NOT NULL,
post_id UUID REFERENCES post (id) ON DELETE CASCADE,
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now()
);

62  migrations/schema.sql  Normal file

@@ -0,0 +1,62 @@
CREATE TABLE actor_profile (
id UUID PRIMARY KEY,
username VARCHAR(100) NOT NULL,
display_name VARCHAR(100),
acct VARCHAR(200) UNIQUE NOT NULL,
bio TEXT,
bio_source TEXT,
avatar_file_name VARCHAR(100),
banner_file_name VARCHAR(100),
follower_count INTEGER NOT NULL CHECK (follower_count >= 0) DEFAULT 0,
following_count INTEGER NOT NULL CHECK (following_count >= 0) DEFAULT 0,
post_count INTEGER NOT NULL CHECK (post_count >= 0) DEFAULT 0,
actor_json JSONB,
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now()
);
CREATE TABLE user_invite_code (
code VARCHAR(100) PRIMARY KEY,
used BOOLEAN NOT NULL DEFAULT FALSE
);
CREATE TABLE user_account (
id UUID PRIMARY KEY REFERENCES actor_profile (id) ON DELETE CASCADE,
wallet_address VARCHAR(100) UNIQUE NOT NULL,
password_hash VARCHAR(200) NOT NULL,
private_key TEXT NOT NULL,
invite_code VARCHAR(100) UNIQUE REFERENCES user_invite_code (code) ON DELETE SET NULL,
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now()
);
CREATE TABLE post (
id UUID PRIMARY KEY,
author_id UUID NOT NULL REFERENCES actor_profile (id) ON DELETE CASCADE,
content TEXT NOT NULL,
ipfs_cid VARCHAR(200),
token_id INTEGER,
token_tx_id VARCHAR(200),
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now()
);
CREATE TABLE relationship (
source_id UUID NOT NULL REFERENCES actor_profile (id) ON DELETE CASCADE,
target_id UUID NOT NULL REFERENCES actor_profile (id) ON DELETE CASCADE,
PRIMARY KEY (source_id, target_id)
);
CREATE TABLE follow_request (
id UUID PRIMARY KEY,
source_id UUID NOT NULL REFERENCES actor_profile (id) ON DELETE CASCADE,
target_id UUID NOT NULL REFERENCES actor_profile (id) ON DELETE CASCADE,
request_status SMALLINT NOT NULL,
UNIQUE (source_id, target_id)
);
CREATE TABLE media_attachment (
id UUID PRIMARY KEY,
owner_id UUID NOT NULL REFERENCES actor_profile (id) ON DELETE CASCADE,
media_type VARCHAR(50),
file_name VARCHAR(200) NOT NULL,
post_id UUID REFERENCES post (id) ON DELETE CASCADE,
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now()
);
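
For illustration, the follow flow these tables encode (a pending row in follow_request, then a relationship row and counter updates once the remote side accepts) could look like the SQL below. The UUIDs are hypothetical and the two actor_profile rows are assumed to exist already; the meaning of request_status = 1 is an assumption, not taken from this commit.

```
-- Illustrative only: placeholder UUIDs; both actor_profile rows are assumed
-- to exist, and the request_status value is an assumption.
INSERT INTO follow_request (id, source_id, target_id, request_status)
VALUES (
    'c0a1f0d4-1111-4a5b-8c3d-000000000001',
    'c0a1f0d4-1111-4a5b-8c3d-0000000000aa',  -- follower
    'c0a1f0d4-1111-4a5b-8c3d-0000000000bb',  -- followee
    1
);
-- Once the follow request is accepted:
INSERT INTO relationship (source_id, target_id)
VALUES ('c0a1f0d4-1111-4a5b-8c3d-0000000000aa', 'c0a1f0d4-1111-4a5b-8c3d-0000000000bb');
UPDATE actor_profile SET following_count = following_count + 1
    WHERE id = 'c0a1f0d4-1111-4a5b-8c3d-0000000000aa';
UPDATE actor_profile SET follower_count = follower_count + 1
    WHERE id = 'c0a1f0d4-1111-4a5b-8c3d-0000000000bb';
```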

263  src/activitypub/activity.rs  Normal file

@@ -0,0 +1,263 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use uuid::Uuid;
use crate::config::Config;
use crate::models::posts::types::Post;
use crate::models::profiles::types::DbActorProfile;
use crate::utils::files::get_file_url;
use super::constants::{AP_CONTEXT, AP_PUBLIC};
use super::views::{get_actor_url, get_object_url};
use super::vocabulary::*;
#[derive(Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Attachment {
pub name: String,
#[serde(rename = "type")]
pub attachment_type: String,
pub media_type: String,
pub url: String,
}
#[derive(Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Object {
#[serde(rename = "@context")]
pub context: Option<Value>,
pub id: String,
#[serde(rename = "type")]
pub object_type: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub actor: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub attachment: Option<Vec<Attachment>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub object: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub published: Option<DateTime<Utc>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub attributed_to: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub in_reply_to: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub content: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub to: Option<Value>,
}
#[derive(Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Activity {
#[serde(rename = "@context")]
pub context: Value,
pub id: String,
#[serde(rename = "type")]
pub activity_type: String,
pub actor: String,
pub object: Value,
}
fn create_activity(
instance_url: &str,
actor_name: &str,
activity_type: &str,
activity_uuid: Option<Uuid>,
object: Value,
) -> Activity {
let actor_id = get_actor_url(
instance_url,
&actor_name,
);
let activity_id = get_object_url(
instance_url,
&activity_uuid.unwrap_or(Uuid::new_v4()),
);
let activity = Activity {
context: json!(AP_CONTEXT),
id: activity_id,
activity_type: activity_type.to_string(),
actor: actor_id,
object,
};
activity
}
pub fn create_activity_note(
config: &Config,
post: &Post,
) -> Activity {
let object_id = get_object_url(
&config.instance_url(),
&post.id,
);
let actor_id = get_actor_url(
&config.instance_url(),
&post.author.username,
);
let attachments: Vec<Attachment> = post.attachments.iter().map(|db_item| {
let url = get_file_url(&config.instance_url(), &db_item.file_name);
let media_type = db_item.media_type.clone().unwrap_or("".to_string());
Attachment {
name: "".to_string(),
attachment_type: DOCUMENT.to_string(),
media_type,
url,
}
}).collect();
let object = Object {
context: Some(json!(AP_CONTEXT)),
id: object_id,
object_type: NOTE.to_string(),
actor: None,
attachment: Some(attachments),
object: None,
published: Some(post.created_at),
attributed_to: Some(actor_id.clone()),
in_reply_to: None,
content: Some(post.content.clone()),
to: Some(json!(AP_PUBLIC)),
};
let activity = create_activity(
&config.instance_url(),
&post.author.username,
CREATE,
None,
serde_json::to_value(object).unwrap(),
);
activity
}
pub fn create_activity_follow(
config: &Config,
actor_profile: &DbActorProfile,
follow_request_id: &Uuid,
target_id: &str,
) -> Activity {
let object = Object {
context: Some(json!(AP_CONTEXT)),
id: target_id.to_owned(),
object_type: PERSON.to_string(),
actor: None,
attachment: None,
object: None,
published: None,
attributed_to: None,
in_reply_to: None,
content: None,
to: None,
};
let activity = create_activity(
&config.instance_url(),
&actor_profile.username,
FOLLOW,
Some(*follow_request_id),
serde_json::to_value(object).unwrap(),
);
activity
}
pub fn create_activity_accept_follow(
config: &Config,
actor_profile: &DbActorProfile,
follow_activity_id: &str,
) -> Activity {
// TODO: use received activity as object
let object = Object {
context: Some(json!(AP_CONTEXT)),
id: follow_activity_id.to_string(),
object_type: FOLLOW.to_string(),
actor: None,
attachment: None,
object: None,
published: None,
attributed_to: None,
in_reply_to: None,
content: None,
to: None,
};
let activity = create_activity(
&config.instance_url(),
&actor_profile.username,
ACCEPT,
None,
serde_json::to_value(object).unwrap(),
);
activity
}
pub fn create_activity_undo_follow(
config: &Config,
actor_profile: &DbActorProfile,
follow_request_id: &Uuid,
target_id: &str,
) -> Activity {
// TODO: retrieve 'Follow' activity from database
let follow_activity_id = get_object_url(
&config.instance_url(),
follow_request_id,
);
let follow_actor_id = get_actor_url(
&config.instance_url(),
&actor_profile.username,
);
let object = Object {
context: Some(json!(AP_CONTEXT)),
id: follow_activity_id,
object_type: FOLLOW.to_string(),
actor: Some(follow_actor_id),
attachment: None,
object: Some(target_id.to_owned()),
published: None,
attributed_to: None,
in_reply_to: None,
content: None,
to: None,
};
let activity = create_activity(
&config.instance_url(),
&actor_profile.username,
UNDO,
None,
serde_json::to_value(object).unwrap(),
);
activity
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct OrderedCollection {
#[serde(rename = "@context")]
pub context: Value,
pub id: String,
#[serde(rename = "type")]
pub object_type: String,
}
impl OrderedCollection {
pub fn new(collection_url: String) -> Self {
Self {
context: json!(AP_CONTEXT),
id: collection_url,
object_type: "OrderedCollection".to_string(),
}
}
}
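
The serde attributes above (rename_all = "camelCase", the "@context" and "type" renames, skip_serializing_if) define the wire format. The standalone sketch below redeclares a matching struct purely for illustration and prints the JSON a Follow activity would roughly serialize to; the IDs and domains are placeholders.

```
// Standalone sketch mirroring the serde attributes of the Activity struct
// above; needs only serde and serde_json. All values are placeholders.
use serde::Serialize;
use serde_json::{json, Value};

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Activity {
    #[serde(rename = "@context")]
    context: Value,
    id: String,
    #[serde(rename = "type")]
    activity_type: String,
    actor: String,
    object: Value,
}

fn main() {
    let follow = Activity {
        context: json!("https://www.w3.org/ns/activitystreams"),
        id: "https://myserver.net/objects/00000000-0000-0000-0000-000000000001".to_string(),
        activity_type: "Follow".to_string(),
        actor: "https://myserver.net/users/alice".to_string(),
        object: json!("https://remote.example/users/bob"),
    };
    // Prints the keys "@context", "id", "type", "actor" and "object".
    println!("{}", serde_json::to_string_pretty(&follow).unwrap());
}
```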

139  src/activitypub/actor.rs  Normal file

@@ -0,0 +1,139 @@
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use crate::config::Config;
use crate::errors::HttpError;
use crate::models::users::types::User;
use crate::utils::crypto::{deserialize_private_key, get_public_key_pem};
use crate::utils::files::get_file_url;
use super::constants::AP_CONTEXT;
use super::views::{
get_actor_url,
get_inbox_url,
get_outbox_url,
get_followers_url,
get_following_url,
};
use super::vocabulary::{PERSON, IMAGE};
const W3ID_CONTEXT: &str = "https://w3id.org/security/v1";
#[derive(Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct PublicKey {
id: String,
owner: String,
pub public_key_pem: String,
}
#[derive(Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Image {
#[serde(rename = "type")]
object_type: String,
pub url: String,
}
#[derive(Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ActorCapabilities {
accepts_chat_messages: Option<bool>,
}
#[derive(Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Actor {
#[serde(rename = "@context")]
context: Option<Value>,
pub id: String,
#[serde(rename = "type")]
object_type: String,
pub name: String,
pub preferred_username: String,
pub inbox: String,
pub outbox: String,
pub followers: String,
pub following: String,
pub public_key: PublicKey,
#[serde(skip_serializing_if = "Option::is_none")]
pub capabilities: Option<ActorCapabilities>,
#[serde(skip_serializing_if = "Option::is_none")]
pub icon: Option<Image>,
#[serde(skip_serializing_if = "Option::is_none")]
pub image: Option<Image>,
#[serde(skip_serializing_if = "Option::is_none")]
pub summary: Option<String>,
}
pub fn get_actor_object(
config: &Config,
user: &User,
) -> Result<Actor, HttpError> {
let username = &user.profile.username;
let id = get_actor_url(&config.instance_url(), &username);
let inbox = get_inbox_url(&config.instance_url(), &username);
let outbox = get_outbox_url(&config.instance_url(), &username);
let followers = get_followers_url(&config.instance_url(), &username);
let following = get_following_url(&config.instance_url(), &username);
let private_key = deserialize_private_key(&user.private_key)
.map_err(|_| HttpError::InternalError)?;
let public_key_pem = get_public_key_pem(&private_key)
.map_err(|_| HttpError::InternalError)?;
let public_key = PublicKey {
id: format!("{}#main-key", id),
owner: id.clone(),
public_key_pem,
};
let avatar = match &user.profile.avatar_file_name {
Some(file_name) => {
let image = Image {
object_type: IMAGE.to_string(),
url: get_file_url(&config.instance_url(), file_name),
};
Some(image)
},
None => None,
};
let banner = match &user.profile.banner_file_name {
Some(file_name) => {
let image = Image {
object_type: IMAGE.to_string(),
url: get_file_url(&config.instance_url(), file_name),
};
Some(image)
},
None => None,
};
let capabilities = ActorCapabilities {
accepts_chat_messages: Some(false),
};
let actor = Actor {
context: Some(json!([
AP_CONTEXT.to_string(),
W3ID_CONTEXT.to_string(),
])),
id,
object_type: PERSON.to_string(),
name: username.to_string(),
preferred_username: username.to_string(),
inbox,
outbox,
followers,
following,
public_key,
capabilities: Some(capabilities),
icon: avatar,
image: banner,
summary: None,
};
Ok(actor)
}

3  src/activitypub/constants.rs  Normal file

@@ -0,0 +1,3 @@
pub const ACTIVITY_CONTENT_TYPE: &str = "application/activity+json";
pub const AP_CONTEXT: &str = "https://www.w3.org/ns/activitystreams";
pub const AP_PUBLIC: &str = "https://www.w3.org/ns/activitystreams#Public";

97  src/activitypub/deliverer.rs  Normal file

@@ -0,0 +1,97 @@
use crate::config::{Environment, Config};
use crate::http_signatures::create::{create_http_signature, SignatureError};
use crate::models::users::types::User;
use crate::utils::crypto::deserialize_private_key;
use super::activity::Activity;
use super::actor::Actor;
use super::constants::ACTIVITY_CONTENT_TYPE;
use super::views::get_actor_url;
#[derive(thiserror::Error, Debug)]
pub enum DelivererError {
#[error("key error")]
KeyDeserializationError(#[from] rsa::pkcs8::Error),
#[error(transparent)]
SignatureError(#[from] SignatureError),
#[error("activity serialization error")]
SerializationError(#[from] serde_json::Error),
#[error(transparent)]
RequestError(#[from] reqwest::Error),
#[error("http error {0:?}")]
HttpError(reqwest::StatusCode),
}
async fn send_activity(
config: &Config,
sender: &User,
activity: &Activity,
inbox_url: &str,
) -> Result<(), DelivererError> {
let activity_json = serde_json::to_string(&activity)?;
log::info!("sending activity: {}", activity_json);
let actor_key = deserialize_private_key(&sender.private_key)?;
let actor_key_id = format!(
"{}#main-key",
get_actor_url(
&config.instance_url(),
&sender.profile.username,
),
);
let headers = create_http_signature(
&inbox_url,
&activity_json,
actor_key,
actor_key_id,
)?;
// Send
match config.environment {
Environment::Development => {
log::info!(
"development mode: not sending activity to {}",
inbox_url,
);
},
Environment::Production => {
let client = reqwest::Client::new();
// Default timeout is 30s
let response = client.post(inbox_url)
.header("Host", headers.host)
.header("Date", headers.date)
.header("Digest", headers.digest)
.header("Signature", headers.signature)
.header("Content-Type", ACTIVITY_CONTENT_TYPE)
.body(activity_json)
.send()
.await?;
let response_status = response.status();
let response_text = response.text().await?;
log::info!(
"remote server response: {}",
response_text,
);
if response_status.is_client_error() || response_status.is_server_error() {
return Err(DelivererError::HttpError(response_status));
}
},
};
Ok(())
}
pub async fn deliver_activity(
config: &Config,
sender: &User,
activity: Activity,
recipients: Vec<Actor>,
) {
for actor in recipients {
// TODO: retry on error
if let Err(err) = send_activity(&config, &sender, &activity, &actor.inbox).await {
log::error!("{}", err);
}
};
}
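
create_http_signature itself is not included in this excerpt. The Host/Date/Digest/Signature headers used above match the draft-cavage HTTP Signatures convention, in which the value that gets signed is a newline-joined list of pseudo-header lines. The sketch below shows only that signing-string step, under those assumptions; the function and its arguments are hypothetical, not the project's implementation.

```
// Sketch only: assembles a draft-cavage style signing string for a POST to an
// inbox. How Mitra formats and signs this is not shown here; names are hypothetical.
fn build_signing_string(inbox_path: &str, host: &str, date: &str, digest: &str) -> String {
    [
        format!("(request-target): post {}", inbox_path),
        format!("host: {}", host),
        format!("date: {}", date),
        format!("digest: {}", digest),
    ]
    .join("\n")
}

fn main() {
    let signing_string = build_signing_string(
        "/users/bob/inbox",
        "remote.example",
        "Tue, 06 Apr 2021 00:00:00 GMT",
        // SHA-256 digest of an empty body, base64-encoded, used as a placeholder.
        "SHA-256=47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=",
    );
    // This string would then be signed with the sender's RSA key (rsa-sha256)
    // and sent in the Signature header along with keyId and the header list.
    println!("{}", signing_string);
}
```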

120  src/activitypub/fetcher.rs  Normal file

@@ -0,0 +1,120 @@
use std::path::PathBuf;
use serde_json::Value;
use crate::models::profiles::types::ProfileCreateData;
use crate::utils::files::{save_file, FileError};
use crate::webfinger::types::JsonResourceDescriptor;
use super::actor::Actor;
use super::constants::ACTIVITY_CONTENT_TYPE;
#[derive(thiserror::Error, Debug)]
pub enum FetchError {
#[error("invalid URL")]
UrlError(#[from] url::ParseError),
#[error(transparent)]
RequestError(#[from] reqwest::Error),
#[error("json parse error")]
JsonParseError(#[from] serde_json::Error),
#[error("file error")]
FileError(#[from] FileError),
#[error("{0}")]
OtherError(&'static str),
}
pub async fn fetch_avatar_and_banner(
actor: &Actor,
media_dir: &PathBuf,
) -> Result<(Option<String>, Option<String>), FetchError> {
let avatar = match &actor.icon {
Some(icon) => {
let file_name = fetch_attachment(
&icon.url,
media_dir,
).await?;
Some(file_name)
},
None => None,
};
let banner = match &actor.image {
Some(image) => {
let file_name = fetch_attachment(
&image.url,
media_dir,
).await?;
Some(file_name)
},
None => None,
};
Ok((avatar, banner))
}
pub async fn fetch_profile(
username: &str,
instance_uri: &str,
media_dir: &PathBuf,
) -> Result<ProfileCreateData, FetchError> {
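    // WebFinger (RFC 7033) resolution: for alice@example.org this requests
    // https://example.org/.well-known/webfinger?resource=acct:alice@example.org
    // and follows the rel="self" link to the ActivityPub actor document.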
let actor_address = format!("{}@{}", &username, &instance_uri);
let webfinger_account_uri = format!("acct:{}", actor_address);
    // TODO: support HTTP
let webfinger_url = format!("https://{}/.well-known/webfinger", instance_uri);
let client = reqwest::Client::new();
let webfinger_data = client.get(&webfinger_url)
.query(&[("resource", webfinger_account_uri)])
.send().await?
.text().await?;
let jrd: JsonResourceDescriptor = serde_json::from_str(&webfinger_data)?;
let link = jrd.links.iter()
.find(|link| link.rel == "self")
.ok_or(FetchError::OtherError("self link not found"))?;
let actor_url = link.href.as_ref()
.ok_or(FetchError::OtherError("account href not found"))?;
fetch_profile_by_actor_id(actor_url, media_dir).await
}
pub async fn fetch_profile_by_actor_id(
actor_url: &str,
media_dir: &PathBuf,
) -> Result<ProfileCreateData, FetchError> {
let actor_host = url::Url::parse(actor_url)?
.host_str()
.ok_or(FetchError::OtherError("invalid URL"))?
.to_owned();
let client = reqwest::Client::new();
let actor_json = client.get(actor_url)
.header(reqwest::header::ACCEPT, ACTIVITY_CONTENT_TYPE)
.send().await?
.text().await?;
let actor_value: Value = serde_json::from_str(&actor_json)?;
let actor: Actor = serde_json::from_value(actor_value.clone())?;
let (avatar, banner) = fetch_avatar_and_banner(&actor, media_dir).await?;
let actor_address = format!(
"{}@{}",
actor.preferred_username,
actor_host,
);
let profile_data = ProfileCreateData {
username: actor.preferred_username,
display_name: Some(actor.name),
acct: actor_address,
bio: actor.summary,
avatar: avatar,
banner: banner,
actor: Some(actor_value),
};
Ok(profile_data)
}
pub async fn fetch_attachment(
url: &str,
output_dir: &PathBuf,
) -> Result<String, FetchError> {
let response = reqwest::get(url).await?;
let file_data = response.bytes().await?;
let file_name = save_file(file_data.to_vec(), output_dir)?;
Ok(file_name)
}

8
src/activitypub/mod.rs Normal file
View file

@ -0,0 +1,8 @@
pub mod activity;
pub mod actor;
pub mod constants;
pub mod deliverer;
pub mod fetcher;
mod receiver;
pub mod views;
mod vocabulary;

210
src/activitypub/receiver.rs Normal file
View file

@ -0,0 +1,210 @@
use regex::Regex;
use serde_json::Value;
use uuid::Uuid;
use crate::config::Config;
use crate::database::{Pool, get_database_client};
use crate::errors::{HttpError, ValidationError};
use crate::models::attachments::queries::create_attachment;
use crate::models::posts::types::PostCreateData;
use crate::models::posts::queries::create_post;
use crate::models::profiles::queries::{
get_profile_by_actor_id,
get_profile_by_acct,
update_profile,
};
use crate::models::profiles::types::ProfileUpdateData;
use crate::models::relationships::queries::{accept_follow_request, follow, unfollow};
use crate::models::users::queries::get_user_by_id;
use super::activity::{Object, Activity, create_activity_accept_follow};
use super::actor::Actor;
use super::deliverer::deliver_activity;
use super::fetcher::{fetch_avatar_and_banner, fetch_attachment};
use super::vocabulary::*;
fn parse_actor_id(actor_id: &str) -> Result<String, ValidationError> {
let url_regexp = Regex::new(r"^https?://.+/users/(?P<username>[0-9a-z_]+)$").unwrap();
let url_caps = url_regexp.captures(&actor_id)
.ok_or(ValidationError("invalid actor ID"))?;
let username = url_caps.name("username")
.ok_or(ValidationError("invalid actor ID"))?
.as_str()
.to_owned();
Ok(username)
}
fn parse_object_id(object_id: &str) -> Result<Uuid, ValidationError> {
let url_regexp = Regex::new(r"^https?://.+/objects/(?P<uuid>[0-9a-f-]+)$").unwrap();
let url_caps = url_regexp.captures(&object_id)
.ok_or(ValidationError("invalid object ID"))?;
let object_uuid: Uuid = url_caps.name("uuid")
.ok_or(ValidationError("invalid object ID"))?
.as_str().parse()
.map_err(|_| ValidationError("invalid object ID"))?;
Ok(object_uuid)
}
pub async fn receive_activity(
config: &Config,
db_pool: &Pool,
_username: String,
activity_raw: Value,
) -> Result<(), HttpError> {
let activity: Activity = serde_json::from_value(activity_raw)
.map_err(|_| ValidationError("invalid activity"))?;
let activity_type = activity.activity_type;
let object_type = activity.object.get("type")
.and_then(|val| val.as_str())
.unwrap_or("Unknown")
.to_owned();
let db_client = &mut **get_database_client(&db_pool).await?;
match (activity_type.as_str(), object_type.as_str()) {
(ACCEPT, FOLLOW) => {
let object: Object = serde_json::from_value(activity.object)
.map_err(|_| ValidationError("invalid object"))?;
// TODO: reject if object ID contains wrong instance URI
let follow_request_id = parse_object_id(&object.id)?;
accept_follow_request(db_client, &follow_request_id).await?;
},
(CREATE, NOTE) => {
let object: Object = serde_json::from_value(activity.object)
.map_err(|_| ValidationError("invalid object"))?;
let attributed_to = object.attributed_to
.ok_or(ValidationError("unattributed note"))?;
let author = get_profile_by_actor_id(db_client, &attributed_to).await?;
let content = object.content
.ok_or(ValidationError("no content"))?;
let mut attachments: Vec<Uuid> = Vec::new();
if let Some(list) = object.attachment {
let mut downloaded: Vec<(String, String)> = Vec::new();
let output_dir = config.media_dir();
for attachment in list {
let file_name = fetch_attachment(&attachment.url, &output_dir).await
.map_err(|_| ValidationError("failed to fetch attachment"))?;
log::info!("downloaded attachment {}", attachment.url);
downloaded.push((file_name, attachment.media_type));
}
for (file_name, media_type) in downloaded {
let db_attachment = create_attachment(
db_client,
&author.id,
Some(media_type),
file_name,
).await?;
attachments.push(db_attachment.id);
}
}
let post_data = PostCreateData {
content,
attachments: attachments,
created_at: object.published,
};
create_post(db_client, &author.id, post_data).await?;
},
(FOLLOW, _) => {
let source_profile = get_profile_by_actor_id(db_client, &activity.actor).await?;
let source_actor_value = source_profile.actor_json.ok_or(HttpError::InternalError)?;
let source_actor: Actor = serde_json::from_value(source_actor_value)
.map_err(|_| HttpError::InternalError)?;
let target_actor_id = activity.object.as_str()
.ok_or(ValidationError("invalid object"))?;
// TODO: reject if object ID contains wrong instance URI
let target_username = parse_actor_id(&target_actor_id)?;
let target_profile = get_profile_by_acct(db_client, &target_username).await?;
// Create and send 'Accept' activity
let target_user = get_user_by_id(db_client, &target_profile.id).await?;
let new_activity = create_activity_accept_follow(&config, &target_profile, &activity.id);
// Save relationship
follow(db_client, &source_profile.id, &target_profile.id).await?;
// Send activity
let recipients = vec![source_actor];
let config_clone = config.clone();
actix_rt::spawn(async move {
deliver_activity(
&config_clone,
&target_user,
new_activity,
recipients,
).await;
});
},
(UNDO, FOLLOW) => {
let object: Object = serde_json::from_value(activity.object)
.map_err(|_| ValidationError("invalid object"))?;
let source_profile = get_profile_by_actor_id(db_client, &activity.actor).await?;
let target_actor_id = object.object
.ok_or(ValidationError("invalid object"))?;
// TODO: reject if actor ID contains wrong instance URI
let target_username = parse_actor_id(&target_actor_id)?;
let target_profile = get_profile_by_acct(db_client, &target_username).await?;
unfollow(db_client, &source_profile.id, &target_profile.id).await?;
},
(UPDATE, PERSON) => {
let actor: Actor = serde_json::from_value(activity.object)
.map_err(|_| ValidationError("invalid actor data"))?;
let profile = get_profile_by_actor_id(db_client, &actor.id).await?;
let (avatar, banner) = fetch_avatar_and_banner(&actor, &config.media_dir()).await
.map_err(|_| ValidationError("failed to fetch image"))?;
let mut profile_data = ProfileUpdateData {
display_name: Some(actor.name),
bio: actor.summary.clone(),
bio_source: actor.summary,
avatar,
banner,
};
profile_data.clean()?;
update_profile(db_client, &profile.id, profile_data).await?;
},
_ => {
return Err(HttpError::ValidationError("activity type is not supported".into()));
},
};
log::info!(
"processed {}({}) from {}",
activity_type,
object_type,
activity.actor,
);
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse_actor_id() {
let username = parse_actor_id("https://example.org/users/test").unwrap();
assert_eq!(username, "test".to_string());
}
#[test]
fn test_parse_actor_id_wrong_path() {
let error = parse_actor_id("https://example.org/user/test").unwrap_err();
assert_eq!(error.to_string(), "invalid actor ID");
}
#[test]
fn test_parse_actor_id_invalid_username() {
let error = parse_actor_id("https://example.org/users/tes-t").unwrap_err();
assert_eq!(error.to_string(), "invalid actor ID");
}
#[test]
fn test_parse_object_id() {
let expected_uuid = Uuid::new_v4();
let object_id = format!(
"https://example.org/objects/{}",
expected_uuid,
);
let object_uuid = parse_object_id(&object_id).unwrap();
assert_eq!(object_uuid, expected_uuid);
}
#[test]
fn test_parse_object_id_invalid_uuid() {
let error = parse_object_id("https://example.org/objects/1234").unwrap_err();
assert_eq!(error.to_string(), "invalid object ID");
}
}

129
src/activitypub/views.rs Normal file
View file

@ -0,0 +1,129 @@
use actix_web::{
get, post, web,
HttpRequest, HttpResponse, Scope,
};
use serde::Deserialize;
use uuid::Uuid;
use crate::config::Config;
use crate::database::{Pool, get_database_client};
use crate::errors::HttpError;
use crate::http_signatures::verify::verify_http_signature;
use crate::models::users::queries::get_user_by_name;
use super::activity::OrderedCollection;
use super::actor::get_actor_object;
use super::constants::ACTIVITY_CONTENT_TYPE;
use super::receiver::receive_activity;
pub fn get_actor_url(instance_url: &str, username: &str) -> String {
format!("{}/users/{}", instance_url, username)
}
pub fn get_inbox_url(instance_url: &str, username: &str) -> String {
format!("{}/users/{}/inbox", instance_url, username)
}
pub fn get_outbox_url(instance_url: &str, username: &str) -> String {
format!("{}/users/{}/outbox", instance_url, username)
}
pub fn get_followers_url(instance_url: &str, username: &str) -> String {
format!("{}/users/{}/followers", instance_url, username)
}
pub fn get_following_url(instance_url: &str, username: &str) -> String {
format!("{}/users/{}/following", instance_url, username)
}
pub fn get_object_url(instance_url: &str, object_uuid: &Uuid) -> String {
format!("{}/objects/{}", instance_url, object_uuid)
}
#[get("")]
async fn get_actor(
config: web::Data<Config>,
db_pool: web::Data<Pool>,
web::Path(username): web::Path<String>,
) -> Result<HttpResponse, HttpError> {
let db_client = &**get_database_client(&db_pool).await?;
let user = get_user_by_name(db_client, &username).await?;
let actor = get_actor_object(&config, &user)?;
let response = HttpResponse::Ok()
.content_type(ACTIVITY_CONTENT_TYPE)
.json(actor);
Ok(response)
}
#[post("/inbox")]
async fn inbox(
config: web::Data<Config>,
db_pool: web::Data<Pool>,
request: HttpRequest,
web::Path(username): web::Path<String>,
activity: web::Json<serde_json::Value>,
) -> Result<HttpResponse, HttpError> {
log::info!("received to '{}' inbox: {}", username, activity);
if let Err(err) = verify_http_signature(&config, &db_pool, &request).await {
log::warn!("invalid signature: {}", err);
}
receive_activity(&config, &db_pool, username, activity.into_inner()).await?;
Ok(HttpResponse::Ok().body("success"))
}
#[derive(Deserialize)]
struct CollectionQueryParams {
page: Option<i32>,
}
#[get("/followers")]
async fn followers_collection(
config: web::Data<Config>,
web::Path(username): web::Path<String>,
query_params: web::Query<CollectionQueryParams>,
) -> Result<HttpResponse, HttpError> {
if query_params.page.is_some() {
// Social graph is not available
return Err(HttpError::PermissionError);
}
let collection_url = get_followers_url(&config.instance_url(), &username);
let collection = OrderedCollection::new(collection_url);
let response = HttpResponse::Ok()
.content_type(ACTIVITY_CONTENT_TYPE)
.json(collection);
Ok(response)
}
#[get("/following")]
async fn following_collection(
config: web::Data<Config>,
web::Path(username): web::Path<String>,
query_params: web::Query<CollectionQueryParams>,
) -> Result<HttpResponse, HttpError> {
if query_params.page.is_some() {
// Social graph is not available
return Err(HttpError::PermissionError);
}
let collection_url = get_following_url(&config.instance_url(), &username);
let collection = OrderedCollection::new(collection_url);
let response = HttpResponse::Ok()
.content_type(ACTIVITY_CONTENT_TYPE)
.json(collection);
Ok(response)
}
pub fn activitypub_scope() -> Scope {
web::scope("/users/{username}")
.service(get_actor)
.service(inbox)
.service(followers_collection)
.service(following_collection)
}
#[get("/objects/{object_id}")]
pub async fn get_object(
web::Path(_object_id): web::Path<String>,
) -> Result<HttpResponse, HttpError> {
// WARNING: activities/objects are not stored
let response = HttpResponse::Gone().body("");
Ok(response)
}

View file

@ -0,0 +1,14 @@
// Activity types
pub const ACCEPT: &str = "Accept";
pub const CREATE: &str = "Create";
pub const FOLLOW: &str = "Follow";
pub const UNDO: &str = "Undo";
pub const UPDATE: &str = "Update";
// Actor types
pub const PERSON: &str = "Person";
// Object types
pub const DOCUMENT: &str = "Document";
pub const IMAGE: &str = "Image";
pub const NOTE: &str = "Note";

87
src/bin/mitractl.rs Normal file
View file

@ -0,0 +1,87 @@
use clap::Clap;
use tokio;
use uuid::Uuid;
use mitra::config;
use mitra::database::{create_pool, get_database_client};
use mitra::database::migrate::apply_migrations;
use mitra::ethereum::utils::generate_ethereum_address;
use mitra::logger::configure_logger;
use mitra::models::profiles::queries as profiles;
use mitra::models::users::queries::{
generate_invite_code,
get_invite_codes,
};
/// Admin CLI tool
#[derive(Clap)]
struct Opts {
#[clap(subcommand)]
subcmd: SubCommand,
}
#[derive(Clap)]
enum SubCommand {
DeleteProfile(DeleteProfile),
GenerateInviteCode(GenerateInviteCode),
ListInviteCodes(ListInviteCodes),
GenerateEthereumAddress(GenerateEthereumAddress),
}
/// Delete profile
#[derive(Clap)]
struct DeleteProfile {
    /// Profile ID
#[clap(short)]
id: Uuid,
}
/// Generate invite code
#[derive(Clap)]
struct GenerateInviteCode { }
/// List invite codes
#[derive(Clap)]
struct ListInviteCodes { }
/// Generate ethereum address
#[derive(Clap)]
struct GenerateEthereumAddress { }
#[tokio::main]
async fn main() {
let config = config::parse_config();
configure_logger();
let db_pool = create_pool(&config.database_url);
apply_migrations(&db_pool).await;
let db_client = get_database_client(&db_pool).await.unwrap();
let opts: Opts = Opts::parse();
match opts.subcmd {
SubCommand::DeleteProfile(subopts) => {
profiles::delete_profile(&**db_client, &subopts.id).await.unwrap();
println!("profile deleted");
},
SubCommand::GenerateInviteCode(_) => {
let invite_code = generate_invite_code(&**db_client).await.unwrap();
println!("generated invite code: {}", invite_code);
},
SubCommand::ListInviteCodes(_) => {
let invite_codes = get_invite_codes(&**db_client).await.unwrap();
            if invite_codes.is_empty() {
println!("no invite codes found");
return;
}
for code in invite_codes {
println!("{}", code);
}
},
SubCommand::GenerateEthereumAddress(_) => {
let (private_key, address) = generate_ethereum_address();
println!(
"address {:?}; private key {}",
address, private_key,
);
},
};
}

159
src/config.rs Normal file
View file

@ -0,0 +1,159 @@
use std::path::PathBuf;
use std::str::FromStr;
use serde::{de, Deserialize, Deserializer};
use url::{Url, ParseError as UrlParseError};
use crate::errors::ConversionError;
#[derive(Clone, Debug)]
pub enum Environment {
Development,
Production,
}
impl FromStr for Environment {
type Err = ConversionError;
fn from_str(val: &str) -> Result<Self, Self::Err> {
let environment = match val {
"development" => Environment::Development,
"production" => Environment::Production,
_ => return Err(ConversionError),
};
Ok(environment)
}
}
fn environment_from_str<'de, D>(deserializer: D) -> Result<Environment, D::Error>
where
D: Deserializer<'de>,
{
let s: String = Deserialize::deserialize(deserializer)?;
Environment::from_str(&s).map_err(de::Error::custom)
}
#[derive(Clone)]
pub struct EnvConfig {
pub environment: Option<Environment>,
pub config_path: String,
pub crate_version: String,
}
fn parse_env() -> EnvConfig {
dotenv::from_filename(".env.local").ok();
dotenv::dotenv().ok();
let environment_str = std::env::var("ENVIRONMENT").ok();
let environment = environment_str
.map(|val| Environment::from_str(&val).expect("invalid environment type"));
let config_path = std::env::var("CONFIG_PATH")
.unwrap_or("config.yaml".to_string());
let crate_version = env!("CARGO_PKG_VERSION").to_string();
EnvConfig {
environment,
config_path,
crate_version,
}
}
fn default_environment() -> Environment { Environment::Development }
fn default_storage_dir() -> PathBuf { PathBuf::from("files") }
fn default_contract_dir() -> PathBuf { PathBuf::from("contracts") }
fn default_cookie_name() -> String { "session".to_string() }
#[derive(Clone, Deserialize)]
pub struct EthereumContract {
pub address: String,
pub chain_id: u32,
pub signing_key: String,
}
#[derive(Clone, Deserialize)]
pub struct Config {
#[serde(default = "default_environment")]
#[serde(deserialize_with = "environment_from_str")]
pub environment: Environment,
#[serde(skip)]
pub version: String,
// Core settings
pub database_url: String,
#[serde(default = "default_storage_dir")]
pub storage_dir: PathBuf,
pub http_host: String,
pub http_port: u32,
#[serde(default = "default_cookie_name")]
pub cookie_name: String,
pub cookie_secret_key: String,
// Instance info
pub instance_uri: String,
pub instance_title: String,
pub instance_short_description: String,
pub instance_description: String,
#[serde(default)]
pub registrations_open: bool, // default is false
pub login_message: String,
// Ethereum & IPFS
#[serde(default = "default_contract_dir")]
pub contract_dir: PathBuf,
pub ethereum_json_rpc_url: Option<String>,
pub ethereum_explorer_url: Option<String>,
pub ethereum_contract: Option<EthereumContract>,
pub ipfs_api_url: Option<String>,
pub ipfs_gateway_url: Option<String>,
}
impl Config {
fn try_instance_url(&self) -> Result<Url, UrlParseError> {
// TODO: allow http in production
let scheme = match self.environment {
Environment::Development => "http",
Environment::Production => "https",
};
let url_str = format!("{}://{}", scheme, self.instance_uri);
Url::parse(&url_str)
}
pub fn instance_url(&self) -> String {
self.try_instance_url().unwrap().origin().ascii_serialization()
}
pub fn media_dir(&self) -> PathBuf {
self.storage_dir.join("media")
}
}
pub fn parse_config() -> Config {
let env = parse_env();
let config_yaml = std::fs::read_to_string(env.config_path)
.expect("failed to load config file");
let mut config = serde_yaml::from_str::<Config>(&config_yaml)
.expect("invalid yaml data");
// Override environment parameter in config if env variable is set
config.environment = env.environment.unwrap_or(config.environment);
    // Set version
config.version = env.crate_version;
// Validate config
if !config.storage_dir.exists() {
panic!("storage_dir does not exist");
};
if !config.contract_dir.exists() {
panic!("contract directory does not exist");
};
config.try_instance_url().expect("invalid instance URI");
config
}
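A sketch of the smallest configuration the struct above deserializes: all values are placeholders, fields with serde defaults and the Option fields may be omitted, and `version` is skipped during deserialization and set later by parse_config.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_deserialize_minimal_config() {
        let config_yaml = r#"
database_url: postgres://mitra:mitra@127.0.0.1:5432/mitra
http_host: 127.0.0.1
http_port: 8383
cookie_secret_key: long-random-string
instance_uri: example.org
instance_title: Example
instance_short_description: Short description
instance_description: Long description
login_message: Sign this message to log in
"#;
        let config: Config = serde_yaml::from_str(config_yaml).unwrap();
        // Defaults kick in for everything that was omitted
        assert!(matches!(config.environment, Environment::Development));
        assert_eq!(config.instance_url(), "http://example.org");
        assert_eq!(config.cookie_name, "session");
        assert!(config.ethereum_contract.is_none());
    }
}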

23
src/database/migrate.rs Normal file
View file

@ -0,0 +1,23 @@
use crate::database::Pool;
mod embedded {
use refinery::embed_migrations;
embed_migrations!("migrations");
}
pub async fn apply_migrations(pool: &Pool) {
// https://github.com/rust-db/refinery/issues/105
let mut client_object = pool.get().await.unwrap();
let client = &mut *(*client_object);
let migration_report = embedded::migrations::runner()
.run_async(client)
.await.unwrap();
for migration in migration_report.applied_migrations() {
log::info!(
"Migration Applied - Name: {}, Version: {}",
migration.name(),
migration.version(),
);
}
}

26
src/database/mod.rs Normal file
View file

@ -0,0 +1,26 @@
pub mod migrate;
pub type Pool = deadpool_postgres::Pool;
pub fn create_pool(database_url: &str) -> Pool {
let pool = deadpool_postgres::Pool::new(
deadpool_postgres::Manager::new(
database_url.parse().expect("invalid database URL"),
tokio_postgres::NoTls,
),
// https://wiki.postgresql.org/wiki/Number_Of_Database_Connections
num_cpus::get() * 2,
);
pool
}
use crate::errors::DatabaseError;
pub async fn get_database_client(pool: &Pool)
-> Result<deadpool_postgres::Client, DatabaseError>
{
// Returns wrapped client
// https://github.com/bikeshedder/deadpool/issues/56
let client = pool.get().await?;
Ok(client)
}

100
src/errors.rs Normal file
View file

@ -0,0 +1,100 @@
use actix_web::{
dev::HttpResponseBuilder,
http::StatusCode,
HttpResponse,
error::ResponseError,
};
use serde::Serialize;
#[derive(thiserror::Error, Debug)]
#[error("conversion error")]
pub struct ConversionError;
#[derive(thiserror::Error, Debug)]
#[error("{0}")]
pub struct ValidationError(pub &'static str);
#[derive(thiserror::Error, Debug)]
pub enum DatabaseError {
#[error("database pool error")]
DatabasePoolError(#[from] deadpool_postgres::PoolError),
#[error("database client error")]
DatabaseClientError(#[from] tokio_postgres::Error),
#[error("database type error")]
DatabaseTypeError(#[from] ConversionError),
#[error("{0}")]
NotFound(&'static str), // object type
#[error("{0}")]
AlreadyExists(&'static str), // object type
}
#[derive(thiserror::Error, Debug)]
pub enum HttpError {
#[error(transparent)]
ActixError(#[from] actix_web::Error),
#[error("database error")]
DatabaseError(#[source] DatabaseError),
#[error("{0}")]
ValidationError(String),
#[error("{0}")]
ValidationErrorAuto(#[from] ValidationError),
#[error("{0}")]
SessionError(&'static str),
#[error("permission error")]
PermissionError,
#[error("{0} not found")]
NotFoundError(&'static str),
#[error("operation not supported")]
NotSupported,
#[error("internal error")]
InternalError,
}
impl From<DatabaseError> for HttpError {
fn from(err: DatabaseError) -> Self {
match err {
DatabaseError::NotFound(name) => HttpError::NotFoundError(name),
DatabaseError::AlreadyExists(name) => HttpError::ValidationError(
format!("{} already exists", name),
),
_ => HttpError::DatabaseError(err),
}
}
}
#[derive(Serialize)]
struct ErrorInfo {
message: String,
}
impl ResponseError for HttpError {
fn error_response(&self) -> HttpResponse {
let err = ErrorInfo { message: self.to_string() };
HttpResponseBuilder::new(self.status_code()).json(err)
}
fn status_code(&self) -> StatusCode {
match self {
HttpError::ActixError(err) => err.as_response_error().status_code(),
HttpError::ValidationError(_) => StatusCode::BAD_REQUEST,
HttpError::ValidationErrorAuto(_) => StatusCode::BAD_REQUEST,
HttpError::SessionError(_) => StatusCode::UNAUTHORIZED,
HttpError::PermissionError => StatusCode::FORBIDDEN,
HttpError::NotFoundError(_) => StatusCode::NOT_FOUND,
HttpError::NotSupported => StatusCode::IM_A_TEAPOT,
_ => StatusCode::INTERNAL_SERVER_ERROR,
}
}
}

10
src/ethereum/api.rs Normal file
View file

@ -0,0 +1,10 @@
use web3::{
api::Web3,
transports::Http,
};
pub fn connect(json_rpc_url: &str) -> Result<Web3<Http>, web3::Error> {
let transport = web3::transports::Http::new(json_rpc_url)?;
let connection = web3::Web3::new(transport);
Ok(connection)
}

3
src/ethereum/mod.rs Normal file
View file

@ -0,0 +1,3 @@
mod api;
pub mod nft;
pub mod utils;

232
src/ethereum/nft.rs Normal file
View file

@ -0,0 +1,232 @@
use std::convert::TryInto;
use std::fs;
use std::path::PathBuf;
use web3::{
api::Web3,
contract::{Contract, Options},
ethabi::{Event, EventParam, ParamType, RawLog, token::Token, encode},
transports::Http,
types::{BlockNumber, FilterBuilder, H256, U256},
};
use crate::config::{Config, EthereumContract};
use crate::database::{Pool, get_database_client};
use crate::errors::DatabaseError;
use crate::ipfs::utils::parse_ipfs_url;
use crate::models::posts::queries::{
get_post_by_ipfs_cid,
update_post,
is_waiting_for_token,
};
use super::api::connect;
use super::utils::{
parse_address, sign_message,
AddressError, SignatureData, SignatureError,
};
pub const COLLECTIBLE: &str = "Collectible";
pub const MINTER: &str = "Minter";
#[derive(thiserror::Error, Debug)]
pub enum EthereumError {
#[error("io error")]
IoError(#[from] std::io::Error),
#[error("json error")]
JsonError(#[from] serde_json::Error),
#[error("invalid address")]
InvalidAddress(#[from] AddressError),
#[error(transparent)]
Web3Error(#[from] web3::Error),
#[error("artifact error")]
ArtifactError,
#[error("abi error")]
AbiError(#[from] web3::ethabi::Error),
#[error("contract error")]
ContractError(#[from] web3::contract::Error),
#[error("improprely configured")]
ImproperlyConfigured,
#[error("data conversion error")]
ConversionError,
#[error("token uri parsing error")]
TokenUriParsingError,
#[error(transparent)]
DatabaseError(#[from] DatabaseError),
#[error("signature error")]
SigError(#[from] SignatureError),
}
fn load_abi(
contract_dir: &PathBuf,
contract_name: &str,
) -> Result<Vec<u8>, EthereumError> {
let contract_artifact_path = contract_dir.join(format!("{}.json", contract_name));
let contract_artifact = fs::read_to_string(contract_artifact_path)?;
let contract_artifact_value: serde_json::Value = serde_json::from_str(&contract_artifact)?;
let contract_abi = contract_artifact_value.get("abi")
.ok_or(EthereumError::ArtifactError)?
.to_string().as_bytes().to_vec();
Ok(contract_abi)
}
pub async fn get_nft_contract(
config: &Config,
) -> Result<(Web3<Http>, Contract<Http>), EthereumError> {
let json_rpc_url = config.ethereum_json_rpc_url.as_ref()
.ok_or(EthereumError::ImproperlyConfigured)?;
let web3 = connect(json_rpc_url)?;
let ethereum_config = config.ethereum_contract.as_ref()
.ok_or(EthereumError::ImproperlyConfigured)?;
let minter_abi = load_abi(&config.contract_dir, MINTER)?;
let minter_address = parse_address(&ethereum_config.address)?;
let minter = Contract::from_json(
web3.eth(),
minter_address,
&minter_abi,
)?;
let token_address = minter.query("token", (), None, Options::default(), None).await?;
let token_abi = load_abi(&config.contract_dir, COLLECTIBLE)?;
let token = Contract::from_json(
web3.eth(),
token_address,
&token_abi,
)?;
log::info!("NFT contract address is {:?}", token.address());
Ok((web3, token))
}
#[derive(Debug)]
struct TokenTransfer {
tx_id: Option<H256>,
from: Token,
to: Token,
token_id: Token,
}
pub async fn process_events(
web3: &Web3<Http>,
contract: &Contract<Http>,
db_pool: &Pool,
) -> Result<(), EthereumError> {
let db_client = &**get_database_client(&db_pool).await?;
if !is_waiting_for_token(db_client).await? {
return Ok(());
}
// Search for Transfer events
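    // (the ERC-721 Transfer(address,address,uint256) event; all three parameters are indexed)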
let event_abi_params = vec![
EventParam {
name: "from".to_string(),
kind: ParamType::Address,
indexed: true,
},
EventParam {
name: "to".to_string(),
kind: ParamType::Address,
indexed: true,
},
EventParam {
name: "tokenId".to_string(),
kind: ParamType::Uint(256),
indexed: true,
},
];
let event_abi = Event {
name: "Transfer".to_string(),
inputs: event_abi_params,
anonymous: false,
};
let filter = FilterBuilder::default()
.address(vec![contract.address()])
.topics(Some(vec![event_abi.signature()]), None, None, None)
.from_block(BlockNumber::Earliest)
.build();
let logs = web3.eth().logs(filter).await?;
// Convert web3 logs into ethabi logs
let transfers: Vec<TokenTransfer> = logs.iter().map(|log| {
let raw_log = RawLog {
topics: log.topics.clone(),
data: log.data.clone().0,
};
match event_abi.parse_log(raw_log) {
Ok(event) => {
let params = event.params;
let transfer = TokenTransfer {
tx_id: log.transaction_hash,
from: params[0].value.clone(),
to: params[1].value.clone(),
token_id: params[2].value.clone(),
};
Ok(transfer)
},
Err(err) => Err(err),
}
}).collect::<Result<_, web3::ethabi::Error>>()?;
for transfer in transfers {
let from_address = transfer.from.into_address()
.ok_or(EthereumError::ConversionError)?;
if from_address.is_zero() {
// Mint event found
let token_id_u256 = transfer.token_id.into_uint()
.ok_or(EthereumError::ConversionError)?;
let token_uri_result = contract.query("tokenURI", (token_id_u256,), None, Options::default(), None);
let token_uri: String = token_uri_result.await?;
let tx_id_h256 = transfer.tx_id.ok_or(EthereumError::ConversionError)?;
let tx_id = hex::encode(tx_id_h256.as_bytes());
let ipfs_cid = parse_ipfs_url(&token_uri)
.map_err(|_| EthereumError::TokenUriParsingError)?;
let mut post = match get_post_by_ipfs_cid(db_client, &ipfs_cid).await {
Ok(post) => post,
Err(err) => {
// Post not found or some other error
log::error!("{}", err);
continue;
},
};
if post.token_id.is_none() {
log::info!("post {} was tokenized via {}", post.id, tx_id);
let token_id: i32 = token_id_u256.try_into()
.map_err(|_| EthereumError::ConversionError)?;
post.token_id = Some(token_id);
post.token_tx_id = Some(tx_id);
update_post(db_client, &post).await?;
};
};
};
Ok(())
}
pub fn create_mint_signature(
contract_config: &EthereumContract,
user_address: &str,
token_uri: &str,
) -> Result<SignatureData, EthereumError> {
let contract_address = parse_address(&contract_config.address)?;
let user_address = parse_address(user_address)?;
let chain_id: U256 = contract_config.chain_id.into();
let chain_id_token = Token::Uint(chain_id.into());
let chain_id_bin = encode(&[chain_id_token]);
let message = [
&chain_id_bin,
contract_address.as_bytes(),
"mint".as_bytes(),
user_address.as_bytes(),
token_uri.as_bytes(),
].concat();
let signature = sign_message(&contract_config.signing_key, &message)?;
Ok(signature)
}

59
src/ethereum/utils.rs Normal file
View file

@ -0,0 +1,59 @@
use std::str::FromStr;
use secp256k1::{Error as KeyError, SecretKey, rand::rngs::OsRng};
use serde::Serialize;
use web3::{
signing::{keccak256, Key, SigningError},
types::Address,
};
pub fn generate_ethereum_address() -> (SecretKey, Address) {
let mut rng = OsRng::new().expect("failed to initialize RNG");
let secret_key = SecretKey::new(&mut rng);
let address = Box::new(secret_key).address();
(secret_key, address)
}
#[derive(thiserror::Error, Debug)]
#[error("address error")]
pub struct AddressError;
pub fn parse_address(address: &str) -> Result<Address, AddressError> {
Address::from_str(address).map_err(|_| AddressError)
}
#[derive(Serialize)]
pub struct SignatureData {
pub v: u64,
pub r: String,
pub s: String,
}
#[derive(thiserror::Error, Debug)]
pub enum SignatureError {
#[error("invalid key")]
InvalidKey(#[from] KeyError),
#[error("signing error")]
SigningError(#[from] SigningError),
}
pub fn sign_message(
signing_key: &str,
message: &[u8],
) -> Result<SignatureData, SignatureError> {
let key = SecretKey::from_str(&signing_key)?;
let message_hash = keccak256(message);
let eip_191_message = [
"\x19Ethereum Signed Message:\n32".as_bytes(),
&message_hash,
].concat();
let eip_191_message_hash = keccak256(&eip_191_message);
let signature = Box::new(key).sign(&eip_191_message_hash, None)?;
let signature_data = SignatureData {
v: signature.v,
r: hex::encode(signature.r.as_bytes()),
s: hex::encode(signature.s.as_bytes()),
};
Ok(signature_data)
}
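A test-style sketch of the signing helpers above. It assumes the hex produced by SecretKey's Display impl (the form mitractl prints for generated keys) is accepted back by SecretKey::from_str; the message is an arbitrary placeholder.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_sign_message_with_generated_key() {
        let (secret_key, _address) = generate_ethereum_address();
        // Hex round-trip: Display output of the secret key feeds SecretKey::from_str
        let signing_key = secret_key.to_string();
        let signature = sign_message(&signing_key, b"example message").unwrap();
        // r and s are 32-byte values, hex-encoded
        assert_eq!(signature.r.len(), 64);
        assert_eq!(signature.s.len(), 64);
    }
}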

View file

@ -0,0 +1,96 @@
use chrono::Utc;
use rsa::RsaPrivateKey;
use crate::utils::crypto::{sign_message, get_message_digest};
pub struct SignatureHeaders {
pub host: String,
pub date: String,
pub digest: String,
pub signature: String,
}
#[derive(thiserror::Error, Debug)]
pub enum SignatureError {
#[error("invalid request url")]
UrlError,
#[error("signature error")]
SignatureError(#[from] rsa::errors::Error),
}
/// Creates HTTP signature according to the old HTTP Signatures Spec:
/// https://datatracker.ietf.org/doc/html/draft-cavage-http-signatures.
pub fn create_http_signature(
request_url: &str,
request_body: &str,
actor_key: RsaPrivateKey,
actor_key_id: String,
) -> Result<SignatureHeaders, SignatureError> {
let request_url_object = url::Url::parse(request_url)
.map_err(|_| SignatureError::UrlError)?;
let host = request_url_object.host_str()
.ok_or(SignatureError::UrlError)?;
let date = Utc::now().to_rfc2822();
let digest = get_message_digest(request_body);
let message = format!(
"(request-target): post {}\nhost: {}\ndate: {}\ndigest: {}",
request_url_object.path(),
host,
date,
digest,
);
let headers_parameter = &["(request-target)", "host", "date", "digest"];
let signature_parameter = sign_message(&actor_key, &message)?;
let signature_header = format!(
r#"keyId="{}",headers="{}",signature="{}""#,
actor_key_id,
headers_parameter.join(" "),
signature_parameter,
);
let headers = SignatureHeaders {
host: host.to_string(),
date,
digest,
signature: signature_header,
};
Ok(headers)
}
#[cfg(test)]
mod tests {
use rand::rngs::OsRng;
use super::*;
#[test]
fn test_create_signature() {
let request_url = "https://example.org/inbox";
let request_body = "{}";
let actor_key = RsaPrivateKey::new(&mut OsRng, 512).unwrap();
let actor_key_id = "https://myserver.org/actor#main-key";
let result = create_http_signature(
request_url,
request_body,
actor_key,
actor_key_id.to_string(),
);
assert_eq!(result.is_ok(), true);
let headers = result.unwrap();
assert_eq!(headers.host, "example.org");
assert_eq!(
headers.digest,
"SHA-256=RBNvo1WzZ4oRRq0W9+hknpT7T8If536DEMBg9hyq/4o=",
);
let expected_signature_header = concat!(
r#"keyId="https://myserver.org/actor#main-key","#,
r#"headers="(request-target) host date digest","#,
r#"signature=""#,
);
assert_eq!(
headers.signature.starts_with(expected_signature_header),
true,
);
}
}

View file

@ -0,0 +1,2 @@
pub mod create;
pub mod verify;

View file

@ -0,0 +1,197 @@
use actix_web::{
HttpRequest,
http::{HeaderMap, Method, Uri},
};
use regex::Regex;
use crate::activitypub::actor::Actor;
use crate::activitypub::fetcher::fetch_profile_by_actor_id;
use crate::config::Config;
use crate::database::{Pool, get_database_client};
use crate::errors::DatabaseError;
use crate::models::profiles::queries::{
get_profile_by_actor_id,
create_profile,
};
use crate::utils::crypto::{deserialize_public_key, verify_signature};
#[derive(thiserror::Error, Debug)]
pub enum VerificationError {
#[error("{0}")]
HeaderError(&'static str),
#[error("{0}")]
ParseError(&'static str),
#[error("invalid key ID")]
UrlError(#[from] url::ParseError),
#[error("actor error")]
ActorError,
#[error("invalid key")]
InvalidKey(#[from] rsa::pkcs8::Error),
#[error("invalid signature")]
InvalidSignature,
}
pub struct SignatureData {
pub actor_id: String,
pub message: String, // reconstructed message
pub signature: String, // base64-encoded signature
}
fn parse_http_signature(
request_method: &Method,
request_uri: &Uri,
request_headers: &HeaderMap,
) -> Result<SignatureData, VerificationError> {
let signature_header = request_headers.get("signature")
.ok_or(VerificationError::HeaderError("missing signature header"))?
.to_str()
.map_err(|_| VerificationError::HeaderError("invalid signature header"))?;
// TODO: support arbitrary parameter order
let signature_header_regexp_raw = concat!(
r#"keyId="(?P<key_id>.+)","#,
r#"headers="(?P<headers>.+)","#,
r#"signature="(?P<signature>.+)""#,
);
let signature_header_regexp = Regex::new(signature_header_regexp_raw).unwrap();
let signature_header_caps = signature_header_regexp
.captures(&signature_header)
.ok_or(VerificationError::HeaderError("invalid signature header"))?;
let key_id = signature_header_caps.name("key_id")
.ok_or(VerificationError::ParseError("keyId parameter is missing"))?
.as_str()
.to_owned();
let headers_parameter = signature_header_caps.name("headers")
.ok_or(VerificationError::ParseError("headers parameter is missing"))?
.as_str()
.to_owned();
let signature = signature_header_caps.name("signature")
.ok_or(VerificationError::ParseError("signature is missing"))?
.as_str()
.to_owned();
let mut message = format!(
"(request-target): {} {}",
request_method.as_str().to_lowercase(),
request_uri,
);
for header in headers_parameter.split(" ") {
if header == "(request-target)" {
continue;
}
let header_value = request_headers.get(header)
.ok_or(VerificationError::HeaderError("missing header"))?
.to_str()
.map_err(|_| VerificationError::HeaderError("invalid header value"))?;
let message_part = format!(
"\n{}: {}",
header,
header_value,
);
message.push_str(&message_part);
}
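    // The key ID is the actor ID plus a fragment (e.g. "#main-key");
    // slicing up to Position::BeforeQuery drops the query string and the fragment.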
let key_url = url::Url::parse(&key_id)?;
let actor_id = &key_url[..url::Position::BeforeQuery];
let signature_data = SignatureData {
actor_id: actor_id.to_string(),
message,
signature,
};
Ok(signature_data)
}
pub async fn verify_http_signature(
config: &Config,
db_pool: &Pool,
request: &HttpRequest,
) -> Result<(), VerificationError> {
let signature_data = parse_http_signature(
request.method(),
request.uri(),
request.headers(),
)?;
let db_client = &**get_database_client(db_pool).await
.map_err(|_| VerificationError::ActorError)?;
let actor_profile = match get_profile_by_actor_id(db_client, &signature_data.actor_id).await {
Ok(profile) => profile,
Err(err) => match err {
DatabaseError::NotFound(_) => {
let profile_data = fetch_profile_by_actor_id(
&signature_data.actor_id,
&config.media_dir(),
).await.map_err(|err| {
log::error!("{}", err);
VerificationError::ActorError
})?;
let profile = create_profile(
db_client,
&profile_data,
).await.map_err(|_| VerificationError::ActorError)?;
profile
},
_ => {
return Err(VerificationError::ActorError);
},
},
};
let actor_value = actor_profile.actor_json.ok_or(VerificationError::ActorError)?;
let actor: Actor = serde_json::from_value(actor_value)
.map_err(|_| VerificationError::ActorError)?;
let public_key = deserialize_public_key(&actor.public_key.public_key_pem)?;
let is_valid_signature = verify_signature(
&public_key,
&signature_data.message,
&signature_data.signature,
).map_err(|_| VerificationError::InvalidSignature)?;
if !is_valid_signature {
return Err(VerificationError::InvalidSignature);
}
Ok(())
}
#[cfg(test)]
mod tests {
use std::str::FromStr;
use actix_web::http::{header, HeaderMap, HeaderName, HeaderValue, Uri};
use super::*;
#[test]
fn test_parse_signature() {
let request_method = Method::from_str("POST").unwrap();
let request_uri = "/user/123/inbox".parse::<Uri>().unwrap();
let mut request_headers = HeaderMap::new();
request_headers.insert(
header::HOST,
HeaderValue::from_static("example.com"),
);
let signature_header = concat!(
r#"keyId="https://myserver.org/actor#main-key","#,
r#"headers="(request-target) host","#,
r#"signature="test""#,
);
request_headers.insert(
HeaderName::from_static("signature"),
HeaderValue::from_static(signature_header),
);
let signature_data = parse_http_signature(
&request_method,
&request_uri,
&request_headers,
).unwrap();
assert_eq!(signature_data.actor_id, "https://myserver.org/actor");
assert_eq!(
signature_data.message,
"(request-target): post /user/123/inbox\nhost: example.com",
);
assert_eq!(signature_data.signature, "test");
}
}

2
src/ipfs/mod.rs Normal file
View file

@ -0,0 +1,2 @@
pub mod store;
pub mod utils;

26
src/ipfs/store.rs Normal file
View file

@ -0,0 +1,26 @@
//! https://docs.ipfs.io/reference/http/api/
use reqwest::{multipart, Client};
use serde::Deserialize;
#[derive(Deserialize)]
#[serde(rename_all="PascalCase")]
struct ObjectAdded {
hash: String,
}
/// Add file to IPFS.
/// Returns CID v1 of the object.
pub async fn add(ipfs_api_url: &str, data: Vec<u8>) -> Result<String, reqwest::Error> {
let client = Client::new();
let file_part = multipart::Part::bytes(data);
let form = multipart::Form::new().part("file", file_part);
let url = format!("{}/api/v0/add", ipfs_api_url);
let response = client.post(&url)
.query(&[("cid-version", 1)])
.multipart(form)
.send()
.await?;
let info: ObjectAdded = response.json().await?;
Ok(info.hash)
}
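A usage sketch, assuming a local IPFS daemon; in the application the URL would come from config.ipfs_api_url.
async fn example_add() -> Result<(), reqwest::Error> {
    // Store a small blob and print its CID v1 (the daemon address is a placeholder)
    let cid = add("http://127.0.0.1:5001", b"hello".to_vec()).await?;
    println!("stored as ipfs://{}", cid);
    Ok(())
}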

20
src/ipfs/utils.rs Normal file
View file

@ -0,0 +1,20 @@
use regex::Regex;
pub const IPFS_LOGO: &str = "bafybeihc4hti5ix4ds2tefhy35qd4c7n5as5cazdmksrxj7ipvcxm64h54";
pub fn get_ipfs_url(cid: &str) -> String {
format!("ipfs://{}", cid)
}
#[derive(thiserror::Error, Debug)]
#[error("parse error")]
pub struct ParseError;
pub fn parse_ipfs_url(url: &str) -> Result<String, ParseError> {
let regexp = Regex::new(r"ipfs://(?P<cid>\w+)").unwrap();
let caps = regexp.captures(&url).ok_or(ParseError)?;
let cid = caps.name("cid")
.ok_or(ParseError)?
.as_str().to_string();
Ok(cid)
}
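A test-style sketch of the URL helpers above; the expectations follow from get_ipfs_url's format and the regex in parse_ipfs_url.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_ipfs_url_round_trip() {
        let url = get_ipfs_url(IPFS_LOGO);
        assert_eq!(parse_ipfs_url(&url).unwrap(), IPFS_LOGO);
        assert!(parse_ipfs_url("https://example.org/image.png").is_err());
    }
}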

14
src/lib.rs Normal file
View file

@ -0,0 +1,14 @@
pub mod activitypub;
pub mod config;
pub mod database;
mod errors;
pub mod ethereum;
mod http_signatures;
mod ipfs;
pub mod logger;
pub mod mastodon_api;
pub mod models;
pub mod nodeinfo;
pub mod scheduler;
mod utils;
pub mod webfinger;

17
src/logger.rs Normal file
View file

@ -0,0 +1,17 @@
use chrono::Local;
use std::io::Write;
pub fn configure_logger() {
env_logger::Builder::new()
.format(|buf, record| {
writeln!(buf,
"{} {} [{}] {}",
Local::now().format("%Y-%m-%dT%H:%M:%S"),
record.target(),
record.level(),
record.args(),
)
})
.filter(None, log::LevelFilter::Info)
.init();
}

101
src/main.rs Normal file
View file

@ -0,0 +1,101 @@
use actix_cors::Cors;
use actix_session::CookieSession;
use actix_web::{
web,
App, HttpServer,
middleware::Logger as ActixLogger,
};
use mitra::activitypub::views::{activitypub_scope, get_object};
use mitra::config::{Environment, parse_config};
use mitra::database::create_pool;
use mitra::database::migrate::apply_migrations;
use mitra::logger::configure_logger;
use mitra::mastodon_api::accounts::views::account_api_scope;
use mitra::mastodon_api::directory::views::profile_directory;
use mitra::mastodon_api::instance::views as instance_api;
use mitra::mastodon_api::media::views::media_api_scope;
use mitra::mastodon_api::search::views::search;
use mitra::mastodon_api::statuses::views::status_api_scope;
use mitra::mastodon_api::timelines::views as timeline_api;
use mitra::mastodon_api::users::views as user_api;
use mitra::nodeinfo::views as nodeinfo;
use mitra::scheduler;
use mitra::webfinger::views as webfinger;
const MAX_UPLOAD_SIZE: usize = 1024 * 1024 * 10;
#[actix_web::main]
async fn main() -> std::io::Result<()> {
let config = parse_config();
configure_logger();
let db_pool = create_pool(&config.database_url);
apply_migrations(&db_pool).await;
log::info!(
"app initialized; environment = '{:?}'",
config.environment,
);
scheduler::run(config.clone(), db_pool.clone());
log::info!("scheduler started");
let http_socket_addr = format!(
"{}:{}",
config.http_host,
config.http_port,
);
let num_workers = std::cmp::max(num_cpus::get(), 4);
HttpServer::new(move || {
let cors_config = match config.environment {
Environment::Development => {
Cors::permissive()
},
Environment::Production => {
let allowed_origin = config.instance_url();
Cors::default().allowed_origin(&allowed_origin)
.allow_any_method()
.allow_any_header()
},
};
let cookie_config = CookieSession::signed(config.cookie_secret_key.as_bytes())
.name(config.cookie_name.clone())
.max_age(86400 * 30)
.secure(true);
App::new()
.wrap(ActixLogger::new("%r : %s : %{r}a"))
.wrap(cors_config)
.wrap(cookie_config)
.data(web::PayloadConfig::default().limit(MAX_UPLOAD_SIZE))
.data(web::JsonConfig::default().limit(MAX_UPLOAD_SIZE))
.data(config.clone())
.data(db_pool.clone())
.service(actix_files::Files::new(
"/media",
config.media_dir(),
))
.service(actix_files::Files::new(
"/contracts",
config.contract_dir.clone(),
))
.service(user_api::create_user_view)
.service(user_api::login_view)
.service(user_api::current_user_view)
.service(user_api::logout_view)
.service(profile_directory)
.service(account_api_scope())
.service(media_api_scope())
.service(status_api_scope())
.service(instance_api::instance)
.service(search)
.service(timeline_api::home_timeline)
.service(webfinger::get_descriptor)
.service(activitypub_scope())
.service(get_object)
.service(nodeinfo::get_nodeinfo)
.service(nodeinfo::get_nodeinfo_2_0)
})
.workers(num_workers)
.bind(http_socket_addr)?
.run()
.await
}

View file

@ -0,0 +1,2 @@
pub mod types;
pub mod views;

View file

@ -0,0 +1,118 @@
use std::path::PathBuf;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::models::profiles::types::{DbActorProfile, ProfileUpdateData};
use crate::utils::files::{FileError, save_validated_b64_file, get_file_url};
/// https://docs.joinmastodon.org/entities/source/
#[derive(Serialize)]
pub struct Source {
pub note: Option<String>,
}
/// https://docs.joinmastodon.org/entities/account/
#[derive(Serialize)]
pub struct Account {
pub id: Uuid,
pub username: String,
pub acct: String,
pub display_name: Option<String>,
pub created_at: DateTime<Utc>,
pub note: Option<String>,
pub avatar: Option<String>,
pub header: Option<String>,
pub followers_count: i32,
pub following_count: i32,
pub statuses_count: i32,
pub source: Option<Source>,
}
impl Account {
pub fn from_profile(profile: DbActorProfile, instance_url: &str) -> Self {
let avatar_url = profile.avatar_file_name.map(|name| get_file_url(instance_url, &name));
let header_url = profile.banner_file_name.map(|name| get_file_url(instance_url, &name));
let source = if profile.actor_json.is_some() {
// Remote actor
None
} else {
let source = Source { note: profile.bio_source };
Some(source)
};
Self {
id: profile.id,
username: profile.username,
acct: profile.acct,
display_name: profile.display_name,
created_at: profile.created_at,
note: profile.bio,
avatar: avatar_url,
header: header_url,
followers_count: profile.follower_count,
following_count: profile.following_count,
statuses_count: profile.post_count,
source,
}
}
}
/// https://docs.joinmastodon.org/methods/accounts/
#[derive(Deserialize)]
pub struct AccountUpdateData {
pub display_name: Option<String>,
pub note: Option<String>,
pub note_source: Option<String>,
pub avatar: Option<String>,
pub header: Option<String>,
}
fn process_b64_image_field_value(
form_value: Option<String>,
db_value: Option<String>,
output_dir: &PathBuf,
) -> Result<Option<String>, FileError> {
let maybe_file_name = match form_value {
Some(b64_data) => {
if b64_data == "" {
// Remove file
None
} else {
// Decode and save file
let (file_name, _) = save_validated_b64_file(
&b64_data, &output_dir, "image/",
)?;
Some(file_name)
}
},
// Keep current value
None => db_value,
};
Ok(maybe_file_name)
}
impl AccountUpdateData {
pub fn into_profile_data(
self,
current_avatar: &Option<String>,
current_banner: &Option<String>,
media_dir: &PathBuf,
) -> Result<ProfileUpdateData, FileError> {
let avatar = process_b64_image_field_value(
self.avatar, current_avatar.clone(), media_dir,
)?;
let banner = process_b64_image_field_value(
self.header, current_banner.clone(), media_dir,
)?;
let profile_data = ProfileUpdateData {
display_name: self.display_name,
bio: self.note,
bio_source: self.note_source,
avatar,
banner,
};
Ok(profile_data)
}
}
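A test-style sketch of the field semantics above (the directory is a placeholder; the two branches exercised here never touch the filesystem): an empty string clears the stored image, an omitted field keeps it.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_process_b64_image_field_value() {
        let media_dir = PathBuf::from("/tmp");
        // Empty string: remove the current image
        let removed = process_b64_image_field_value(
            Some("".to_string()), Some("old.png".to_string()), &media_dir,
        ).unwrap();
        assert_eq!(removed, None);
        // Missing field: keep the current value
        let kept = process_b64_image_field_value(
            None, Some("old.png".to_string()), &media_dir,
        ).unwrap();
        assert_eq!(kept, Some("old.png".to_string()));
    }
}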

View file

@ -0,0 +1,207 @@
use actix_session::Session;
use actix_web::{get, post, patch, web, HttpResponse, Scope};
use serde::Deserialize;
use uuid::Uuid;
use crate::activitypub::activity::{
create_activity_follow,
create_activity_undo_follow,
};
use crate::activitypub::actor::Actor;
use crate::activitypub::deliverer::deliver_activity;
use crate::config::Config;
use crate::database::{Pool, get_database_client};
use crate::errors::HttpError;
use crate::mastodon_api::statuses::types::Status;
use crate::mastodon_api::users::auth::get_current_user;
use crate::models::posts::queries::get_posts_by_author;
use crate::models::profiles::queries::{
get_profile_by_id,
update_profile,
};
use crate::models::relationships::queries as follows;
use crate::utils::files::FileError;
use super::types::{Account, AccountUpdateData};
#[get("/{account_id}")]
async fn get_account(
config: web::Data<Config>,
db_pool: web::Data<Pool>,
web::Path(account_id): web::Path<Uuid>,
) -> Result<HttpResponse, HttpError> {
let db_client = &**get_database_client(&db_pool).await?;
let profile = get_profile_by_id(db_client, &account_id).await?;
let account = Account::from_profile(profile, &config.instance_url());
Ok(HttpResponse::Ok().json(account))
}
#[patch("/update_credentials")]
async fn update_credentials(
config: web::Data<Config>,
db_pool: web::Data<Pool>,
session: Session,
data: web::Json<AccountUpdateData>,
) -> Result<HttpResponse, HttpError> {
let db_client = &**get_database_client(&db_pool).await?;
let current_user = get_current_user(db_client, session).await?;
let profile = get_profile_by_id(db_client, &current_user.id).await?;
let mut profile_data = data.into_inner()
.into_profile_data(
&profile.avatar_file_name,
&profile.banner_file_name,
&config.media_dir(),
)
.map_err(|err| {
match err {
FileError::Base64DecodingError(_) => {
HttpError::ValidationError("base64 decoding error".into())
},
FileError::InvalidMediaType => {
HttpError::ValidationError("invalid media type".into())
},
_ => HttpError::InternalError,
}
})?;
profile_data.clean()?;
let updated_profile = update_profile(
db_client,
&profile.id,
profile_data,
).await?;
let account = Account::from_profile(updated_profile, &config.instance_url());
Ok(HttpResponse::Ok().json(account))
}
// TODO: actix currently doesn't support parameter arrays
// https://github.com/actix/actix-web/issues/2044
#[derive(Deserialize)]
pub struct RelationshipQueryParams {
#[serde(rename(deserialize = "id[]"))]
id: Uuid,
}
#[get("/relationships")]
async fn get_relationships(
db_pool: web::Data<Pool>,
session: Session,
query_params: web::Query<RelationshipQueryParams>,
) -> Result<HttpResponse, HttpError> {
let db_client = &**get_database_client(&db_pool).await?;
let current_user = get_current_user(db_client, session).await?;
let relationships = follows::get_relationships(
db_client,
current_user.id,
vec![query_params.into_inner().id],
).await?;
Ok(HttpResponse::Ok().json(relationships))
}
#[post("/{account_id}/follow")]
async fn follow(
config: web::Data<Config>,
db_pool: web::Data<Pool>,
session: Session,
web::Path(account_id): web::Path<Uuid>,
) -> Result<HttpResponse, HttpError> {
let db_client = &mut **get_database_client(&db_pool).await?;
let current_user = get_current_user(db_client, session).await?;
let profile = get_profile_by_id(db_client, &account_id).await?;
let relationship = if let Some(actor_value) = profile.actor_json {
// Remote follow
let request = follows::create_follow_request(db_client, &current_user.id, &profile.id).await?;
let actor: Actor = serde_json::from_value(actor_value)
.map_err(|_| HttpError::InternalError)?;
let activity = create_activity_follow(
&config,
&current_user.profile,
&request.id,
&actor.id,
);
let activity_sender = current_user.clone();
actix_rt::spawn(async move {
deliver_activity(
&config,
&activity_sender,
activity,
vec![actor],
).await;
});
follows::get_relationship(db_client, &current_user.id, &profile.id).await?
} else {
follows::follow(db_client, &current_user.id, &profile.id).await?
};
Ok(HttpResponse::Ok().json(relationship))
}
#[post("/{account_id}/unfollow")]
async fn unfollow(
config: web::Data<Config>,
db_pool: web::Data<Pool>,
session: Session,
web::Path(account_id): web::Path<Uuid>,
) -> Result<HttpResponse, HttpError> {
let db_client = &mut **get_database_client(&db_pool).await?;
let current_user = get_current_user(db_client, session).await?;
let target_profile = get_profile_by_id(db_client, &account_id).await?;
let relationship = if let Some(actor_value) = target_profile.actor_json {
        // Remote unfollow
let follow_request = follows::get_follow_request_by_path(
db_client,
&current_user.id,
&target_profile.id,
).await?;
let relationship = follows::unfollow(
db_client,
&current_user.id,
&target_profile.id,
).await?;
// Federate
let actor: Actor = serde_json::from_value(actor_value)
.map_err(|_| HttpError::InternalError)?;
let activity = create_activity_undo_follow(
&config,
&current_user.profile,
&follow_request.id,
&actor.id,
);
actix_rt::spawn(async move {
deliver_activity(
&config,
&current_user,
activity,
vec![actor],
).await;
});
// TODO: uncouple unfollow and get_relationship
relationship
} else {
follows::unfollow(db_client, &current_user.id, &target_profile.id).await?
};
Ok(HttpResponse::Ok().json(relationship))
}
#[get("/{account_id}/statuses")]
async fn get_account_statuses(
config: web::Data<Config>,
db_pool: web::Data<Pool>,
web::Path(account_id): web::Path<Uuid>,
) -> Result<HttpResponse, HttpError> {
let db_client = &**get_database_client(&db_pool).await?;
let posts = get_posts_by_author(db_client, &account_id).await?;
let statuses: Vec<Status> = posts.into_iter()
.map(|post| Status::from_post(post, &config.instance_url()))
.collect();
Ok(HttpResponse::Ok().json(statuses))
}
pub fn account_api_scope() -> Scope {
web::scope("/api/v1/accounts")
// Routes without account ID
.service(get_relationships)
.service(update_credentials)
// Routes with account ID
.service(get_account)
.service(follow)
.service(unfollow)
.service(get_account_statuses)
}

View file

@ -0,0 +1 @@
pub mod views;

View file

@ -0,0 +1,24 @@
use actix_session::Session;
use actix_web::{get, web, HttpResponse};
use crate::config::Config;
use crate::database::{Pool, get_database_client};
use crate::errors::HttpError;
use crate::mastodon_api::accounts::types::Account;
use crate::mastodon_api::users::auth::get_current_user;
use crate::models::profiles::queries::get_profiles;
#[get("/api/v1/directory")]
pub async fn profile_directory(
config: web::Data<Config>,
db_pool: web::Data<Pool>,
session: Session,
) -> Result<HttpResponse, HttpError> {
let db_client = &**get_database_client(&db_pool).await?;
get_current_user(db_client, session).await?;
let accounts: Vec<Account> = get_profiles(db_client).await?
.into_iter()
.map(|profile| Account::from_profile(profile, &config.instance_url()))
.collect();
Ok(HttpResponse::Ok().json(accounts))
}

View file

@ -0,0 +1,2 @@
pub mod types;
pub mod views;

View file

@ -0,0 +1,40 @@
use serde::Serialize;
use crate::config::Config;
use crate::ethereum::nft::MINTER;
#[derive(Serialize)]
pub struct Instance {
uri: String,
title: String,
short_description: String,
description: String,
version: String,
registrations: bool,
login_message: String,
ethereum_explorer_url: Option<String>,
nft_contract_name: Option<String>,
nft_contract_address: Option<String>,
ipfs_gateway_url: Option<String>,
}
impl From<&Config> for Instance {
fn from(config: &Config) -> Self {
Self {
uri: config.instance_uri.clone(),
title: config.instance_title.clone(),
short_description: config.instance_short_description.clone(),
description: config.instance_description.clone(),
version: config.version.clone(),
            registrations: config.registrations_open,
login_message: config.login_message.clone(),
ethereum_explorer_url: config.ethereum_explorer_url.clone(),
nft_contract_name: config.ethereum_contract.as_ref()
.and(Some(MINTER.into())),
nft_contract_address: config.ethereum_contract.as_ref()
.map(|val| val.address.clone()),
ipfs_gateway_url: config.ipfs_gateway_url.clone(),
}
}
}

View file

@ -0,0 +1,13 @@
use actix_web::{get, web, HttpResponse};
use crate::config::Config;
use crate::errors::HttpError;
use super::types::Instance;
#[get("/api/v1/instance")]
pub async fn instance(
instance_config: web::Data<Config>,
) -> Result<HttpResponse, HttpError> {
let instance = Instance::from(instance_config.as_ref());
Ok(HttpResponse::Ok().json(instance))
}

View file

@ -0,0 +1,2 @@
pub mod types;
pub mod views;

View file

@ -0,0 +1,42 @@
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::models::attachments::types::{
DbMediaAttachment,
AttachmentType,
};
use crate::utils::files::get_file_url;
/// https://docs.joinmastodon.org/methods/statuses/media/
#[derive(Deserialize)]
pub struct AttachmentCreateData {
// base64-encoded file
pub file: String,
}
/// https://docs.joinmastodon.org/entities/attachment/
#[derive(Serialize)]
pub struct Attachment {
pub id: Uuid,
#[serde(rename = "type")]
pub attachment_type: String,
pub url: String,
}
impl Attachment {
pub fn from_db(db_object: DbMediaAttachment, instance_url: &str) -> Self {
let attachment_type = AttachmentType::from_media_type(db_object.media_type);
let attachment_type_mastodon = match attachment_type {
AttachmentType::Unknown => "unknown",
AttachmentType::Image => "image",
};
let attachment_url = get_file_url(instance_url, &db_object.file_name);
Self {
id: db_object.id,
attachment_type: attachment_type_mastodon.to_string(),
url: attachment_url,
}
}
}

View file

@ -0,0 +1,44 @@
use actix_session::Session;
use actix_web::{post, web, HttpResponse, Scope};
use crate::config::Config;
use crate::database::{Pool, get_database_client};
use crate::errors::HttpError;
use crate::mastodon_api::users::auth::get_current_user;
use crate::models::attachments::queries::create_attachment;
use crate::utils::files::{FileError, save_b64_file};
use super::types::{AttachmentCreateData, Attachment};
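// Decodes a base64-encoded upload, writes it to the media directory
// and records the attachment in the database.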
#[post("")]
async fn create_attachment_view(
config: web::Data<Config>,
db_pool: web::Data<Pool>,
session: Session,
data: web::Json<AttachmentCreateData>,
) -> Result<HttpResponse, HttpError> {
let db_client = get_database_client(&db_pool).await?;
let current_user = get_current_user(&**db_client, session).await?;
let (file_name, media_type) = save_b64_file(
&data.file,
&config.media_dir(),
).map_err(|err| match err {
FileError::Base64DecodingError(err) => HttpError::ValidationError(err.to_string()),
_ => HttpError::InternalError,
})?;
let db_attachment = create_attachment(
&**db_client,
&current_user.id,
media_type,
file_name,
).await?;
let attachment = Attachment::from_db(
db_attachment,
&config.instance_url(),
);
Ok(HttpResponse::Ok().json(attachment))
}
pub fn media_api_scope() -> Scope {
web::scope("/api/v1/media")
.service(create_attachment_view)
}

8
src/mastodon_api/mod.rs Normal file
View file

@ -0,0 +1,8 @@
pub mod accounts;
pub mod directory;
pub mod instance;
pub mod media;
pub mod search;
pub mod statuses;
pub mod timelines;
pub mod users;

View file

@ -0,0 +1,3 @@
pub mod queries;
pub mod types;
pub mod views;

View file

@ -0,0 +1,47 @@
use regex::Regex;
use tokio_postgres::GenericClient;
use crate::activitypub::fetcher::fetch_profile;
use crate::config::Config;
use crate::errors::{ValidationError, HttpError};
use crate::models::profiles::queries::{create_profile, search_profile};
use crate::models::profiles::types::DbActorProfile;
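/// Splits a search query of the form `@username@instance` (the leading `@`
/// and the instance part are optional) into a username and an optional instance.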
fn parse_search_query(query: &str) ->
Result<(String, Option<String>), ValidationError>
{
let acct_regexp = Regex::new(r"^@?(?P<user>\w+)(@(?P<instance>[\w\.-]+))?").unwrap();
let acct_caps = acct_regexp.captures(query)
.ok_or(ValidationError("invalid search query"))?;
let username = acct_caps.name("user")
.ok_or(ValidationError("invalid search query"))?
.as_str().to_string();
let instance = acct_caps.name("instance")
.map(|val| val.as_str().to_string());
Ok((username, instance))
}
pub async fn search(
config: &Config,
db_client: &impl GenericClient,
search_query: &str,
) -> Result<Vec<DbActorProfile>, HttpError> {
let (username, instance) = parse_search_query(search_query)?;
let mut profiles = search_profile(db_client, &username, &instance).await?;
if profiles.is_empty() && instance.is_some() {
let instance_uri = instance.unwrap();
let media_dir = config.media_dir();
let profile_data = fetch_profile(&username, &instance_uri, &media_dir).await
.map_err(|err| {
log::warn!("{}", err);
HttpError::NotFoundError("remote profile")
})?;
let profile = create_profile(db_client, &profile_data).await?;
log::info!(
"imported profile '{}'",
profile.acct,
);
profiles.push(profile);
}
Ok(profiles)
}
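// A minimal test sketch for parse_search_query, using placeholder account names;
// it relies only on the acct regex defined above.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_parse_search_query() {
        match parse_search_query("@user@social.example") {
            Ok((username, instance)) => {
                assert_eq!(username, "user");
                assert_eq!(instance.as_deref(), Some("social.example"));
            },
            Err(_) => panic!("query should be parsed"),
        };
    }
}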

View file

@ -0,0 +1,9 @@
use serde::Serialize;
use crate::mastodon_api::accounts::types::Account;
/// https://docs.joinmastodon.org/methods/search/
#[derive(Serialize)]
pub struct SearchResults {
pub accounts: Vec<Account>,
}

View file

@ -0,0 +1,33 @@
use actix_session::Session;
use actix_web::{get, web, HttpResponse};
use serde::Deserialize;
use crate::config::Config;
use crate::database::{Pool, get_database_client};
use crate::errors::HttpError;
use crate::mastodon_api::accounts::types::Account;
use crate::mastodon_api::users::auth::get_current_user;
use super::queries;
use super::types::SearchResults;
#[derive(Deserialize)]
struct SearchQueryParams {
q: String,
}
#[get("/api/v2/search")]
async fn search(
config: web::Data<Config>,
db_pool: web::Data<Pool>,
session: Session,
query_params: web::Query<SearchQueryParams>,
) -> Result<HttpResponse, HttpError> {
let db_client = get_database_client(&db_pool).await?;
get_current_user(&**db_client, session).await?;
let profiles = queries::search(&config, &**db_client, &query_params.q).await?;
let accounts: Vec<Account> = profiles.into_iter()
.map(|profile| Account::from_profile(profile, &config.instance_url()))
.collect();
let results = SearchResults { accounts };
Ok(HttpResponse::Ok().json(results))
}

View file

@ -0,0 +1,2 @@
pub mod types;
pub mod views;

View file

@ -0,0 +1,61 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::mastodon_api::accounts::types::Account;
use crate::mastodon_api::media::types::Attachment;
use crate::models::posts::types::{Post, PostCreateData};
/// https://docs.joinmastodon.org/entities/status/
#[derive(Serialize)]
pub struct Status {
pub id: Uuid,
pub created_at: DateTime<Utc>,
pub account: Account,
pub content: String,
pub media_attachments: Vec<Attachment>,
// Extra fields (not part of the Mastodon Status entity)
pub ipfs_cid: Option<String>,
pub token_id: Option<i32>,
pub token_tx_id: Option<String>,
}
impl Status {
pub fn from_post(post: Post, instance_url: &str) -> Self {
let attachments: Vec<Attachment> = post.attachments.into_iter()
.map(|item| Attachment::from_db(item, instance_url))
.collect();
let account = Account::from_profile(post.author, instance_url);
Self {
id: post.id,
created_at: post.created_at,
account,
content: post.content,
media_attachments: attachments,
ipfs_cid: post.ipfs_cid,
token_id: post.token_id,
token_tx_id: post.token_tx_id,
}
}
}
/// https://docs.joinmastodon.org/methods/statuses/
#[derive(Deserialize)]
pub struct StatusData {
pub status: String,
#[serde(rename = "media_ids[]")]
pub media_ids: Option<Vec<Uuid>>,
}
impl From<StatusData> for PostCreateData {
fn from(value: StatusData) -> Self {
Self {
content: value.status,
attachments: value.media_ids.unwrap_or_default(),
created_at: None,
}
}
}

View file

@ -0,0 +1,159 @@
use actix_session::Session;
use actix_web::{get, post, web, HttpResponse, Scope};
use serde::Serialize;
use uuid::Uuid;
use crate::activitypub::activity::create_activity_note;
use crate::activitypub::actor::Actor;
use crate::activitypub::deliverer::deliver_activity;
use crate::config::Config;
use crate::database::{Pool, get_database_client};
use crate::errors::HttpError;
use crate::ethereum::nft::create_mint_signature;
use crate::ipfs::store as ipfs_store;
use crate::ipfs::utils::{IPFS_LOGO, get_ipfs_url};
use crate::mastodon_api::users::auth::get_current_user;
use crate::models::profiles::queries::get_followers;
use crate::models::posts::queries::{create_post, get_post_by_id, update_post};
use crate::models::posts::types::PostCreateData;
use super::types::{Status, StatusData};
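// Creates a new post and delivers a Create(Note) activity
// to remote followers in a background task.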
#[post("")]
async fn create_status(
config: web::Data<Config>,
db_pool: web::Data<Pool>,
session: Session,
data: web::Json<StatusData>,
) -> Result<HttpResponse, HttpError> {
let db_client = &mut **get_database_client(&db_pool).await?;
let current_user = get_current_user(db_client, session).await?;
let mut post_data = PostCreateData::from(data.into_inner());
post_data.validate()?;
let post = create_post(db_client, &current_user.id, post_data).await?;
let status = Status::from_post(post.clone(), &config.instance_url());
// Federate
let activity = create_activity_note(&config, &post);
let followers = get_followers(db_client, &current_user.id).await?;
let mut recipients: Vec<Actor> = Vec::new();
for follower in followers {
if let Some(actor_value) = follower.actor_json {
// Remote
let actor: Actor = serde_json::from_value(actor_value)
.map_err(|_| HttpError::InternalError)?;
recipients.push(actor);
};
};
actix_rt::spawn(async move {
deliver_activity(
&config,
&current_user,
activity,
recipients,
).await;
});
Ok(HttpResponse::Created().json(status))
}
#[get("/{status_id}")]
async fn get_status(
config: web::Data<Config>,
db_pool: web::Data<Pool>,
web::Path(status_id): web::Path<Uuid>,
) -> Result<HttpResponse, HttpError> {
let db_client = &**get_database_client(&db_pool).await?;
let post = get_post_by_id(db_client, &status_id).await?;
let status = Status::from_post(post, &config.instance_url());
Ok(HttpResponse::Ok().json(status))
}
// https://docs.opensea.io/docs/metadata-standards
#[derive(Serialize)]
struct PostMetadata {
name: String,
description: String,
image: String,
external_url: String,
}
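// Uploads the post's first attachment (or the IPFS logo when there is none)
// together with the post metadata to IPFS, and saves the metadata CID on the post.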
#[post("/{status_id}/make_permanent")]
async fn make_permanent(
config: web::Data<Config>,
db_pool: web::Data<Pool>,
session: Session,
web::Path(status_id): web::Path<Uuid>,
) -> Result<HttpResponse, HttpError> {
let db_client = &**get_database_client(&db_pool).await?;
get_current_user(db_client, session).await?;
let mut post = get_post_by_id(db_client, &status_id).await?;
let ipfs_api_url = config.ipfs_api_url.as_ref()
.ok_or(HttpError::NotSupported)?;
let post_image_cid = if let Some(attachment) = post.attachments.first() {
// Add attachment to IPFS
let image_path = config.media_dir().join(&attachment.file_name);
let image_data = std::fs::read(image_path)
.map_err(|_| HttpError::InternalError)?;
let image_cid = ipfs_store::add(&ipfs_api_url, image_data).await
.map_err(|_| HttpError::InternalError)?;
image_cid
} else {
// Use IPFS logo if there's no image
IPFS_LOGO.to_string()
};
let post_metadata = PostMetadata {
name: format!("Post {}", post.id),
description: post.content.clone(),
image: get_ipfs_url(&post_image_cid),
// TODO: use absolute URL
external_url: format!("/post/{}", post.id),
};
let post_metadata_json = serde_json::to_string(&post_metadata)
.map_err(|_| HttpError::InternalError)?
.into_bytes();
let post_metadata_cid = ipfs_store::add(&ipfs_api_url, post_metadata_json).await
.map_err(|_| HttpError::InternalError)?;
// Update post
post.ipfs_cid = Some(post_metadata_cid);
update_post(db_client, &post).await?;
let status = Status::from_post(post, &config.instance_url());
Ok(HttpResponse::Ok().json(status))
}
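// Returns a minting signature for a post that has already been saved to IPFS;
// only the author of the post can request it.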
#[get("/{status_id}/signature")]
async fn get_signature(
config: web::Data<Config>,
db_pool: web::Data<Pool>,
session: Session,
web::Path(status_id): web::Path<Uuid>,
) -> Result<HttpResponse, HttpError> {
let db_client = &**get_database_client(&db_pool).await?;
let current_user = get_current_user(db_client, session).await?;
let contract_config = config.ethereum_contract.as_ref()
.ok_or(HttpError::NotSupported)?;
let post = get_post_by_id(db_client, &status_id).await?;
if post.author.id != current_user.id {
// Users can only tokenize their own posts
Err(HttpError::NotFoundError("post"))?;
}
let ipfs_cid = post.ipfs_cid
// The post hasn't been saved to IPFS yet, so its metadata is not immutable
.ok_or(HttpError::ValidationError("post is not immutable".into()))?;
let token_uri = get_ipfs_url(&ipfs_cid);
let signature = create_mint_signature(
&contract_config,
&current_user.wallet_address,
&token_uri,
).map_err(|_| HttpError::InternalError)?;
Ok(HttpResponse::Ok().json(signature))
}
pub fn status_api_scope() -> Scope {
web::scope("/api/v1/statuses")
// Routes without status ID
.service(create_status)
// Routes with status ID
.service(get_status)
.service(make_permanent)
.service(get_signature)
}

View file

@ -0,0 +1 @@
pub mod views;

View file

@ -0,0 +1,24 @@
use actix_session::Session;
use actix_web::{get, web, HttpResponse};
use crate::config::Config;
use crate::database::{Pool, get_database_client};
use crate::errors::HttpError;
use crate::mastodon_api::statuses::types::Status;
use crate::mastodon_api::users::auth::get_current_user;
use crate::models::posts::queries::get_posts;
#[get("/api/v1/timelines/home")]
pub async fn home_timeline(
config: web::Data<Config>,
db_pool: web::Data<Pool>,
session: Session,
) -> Result<HttpResponse, HttpError> {
let db_client = &**get_database_client(&db_pool).await?;
let current_user = get_current_user(db_client, session).await?;
let statuses: Vec<Status> = get_posts(db_client, &current_user.id).await?
.into_iter()
.map(|post| Status::from_post(post, &config.instance_url()))
.collect();
Ok(HttpResponse::Ok().json(statuses))
}

View file

@ -0,0 +1,25 @@
use actix_session::Session;
use tokio_postgres::GenericClient;
use uuid::Uuid;
use crate::errors::HttpError;
use crate::models::users::queries::get_user_by_id;
use crate::models::users::types::User;
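/// Reads the user ID stored in the session cookie and loads the corresponding user.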
pub async fn get_current_user(
db_client: &impl GenericClient,
session: Session,
) -> Result<User, HttpError> {
let maybe_user_id = session.get::<String>("id")
.map_err(|_| HttpError::SessionError("failed to read cookie"))?;
if let Some(user_id) = maybe_user_id {
let user_uuid = Uuid::parse_str(&user_id)
.map_err(|_| HttpError::SessionError("invalid uuid"))?;
let user = get_user_by_id(db_client, &user_uuid)
.await
.map_err(|_| HttpError::SessionError("user not found"))?;
Ok(user)
} else {
Err(HttpError::SessionError("session not found"))
}
}

View file

@ -0,0 +1,3 @@
pub mod auth;
mod types;
pub mod views;

View file

@ -0,0 +1,24 @@
use serde::Serialize;
use uuid::Uuid;
use crate::mastodon_api::accounts::types::Account;
use crate::models::users::types::User;
// TODO: use Account instead
#[derive(Serialize)]
pub struct ApiUser {
pub id: Uuid,
pub profile: Account,
pub wallet_address: String,
}
impl ApiUser {
pub fn from_user(user: User, instance_url: &str) -> Self {
let account = Account::from_profile(user.profile, instance_url);
Self {
id: user.id,
profile: account,
wallet_address: user.wallet_address,
}
}
}

View file

@ -0,0 +1,105 @@
use actix_session::Session;
use actix_web::{
get, post, web,
HttpResponse,
};
use crate::config::Config;
use crate::database::{Pool, get_database_client};
use crate::errors::{HttpError, ValidationError};
use crate::models::users::queries::{
is_valid_invite_code,
create_user,
get_user_by_wallet_address,
};
use crate::models::users::types::{
UserRegistrationData,
UserLoginData,
};
use crate::utils::crypto::{
hash_password,
verify_password,
generate_private_key,
serialize_private_key,
};
use super::auth::get_current_user;
use super::types::ApiUser;
// Mastodon equivalent: POST /api/v1/accounts
#[post("/api/v0/create")]
async fn create_user_view(
config: web::Data<Config>,
db_pool: web::Data<Pool>,
form: web::Json<UserRegistrationData>,
session: Session,
) -> Result<HttpResponse, HttpError> {
let db_client = &mut **get_database_client(&db_pool).await?;
// Validate
form.clean()?;
if !config.registrations_open {
let invite_code = form.invite_code.as_ref()
.ok_or(ValidationError("invite code is required"))?;
if !is_valid_invite_code(db_client, &invite_code).await? {
Err(ValidationError("invalid invite code"))?;
}
}
// Hash password and generate private key
let password_hash = hash_password(&form.signature)
.map_err(|_| HttpError::InternalError)?;
let private_key = match web::block(generate_private_key).await {
Ok(private_key) => private_key,
Err(_) => return Err(HttpError::InternalError),
};
let private_key_pem = serialize_private_key(private_key)
.map_err(|_| HttpError::InternalError)?;
let user = create_user(
db_client,
form.into_inner(),
password_hash,
private_key_pem,
).await?;
session.set("id", user.id)?;
let api_user = ApiUser::from_user(user, &config.instance_url());
Ok(HttpResponse::Created().json(api_user))
}
#[post("/api/v0/login")]
async fn login_view(
config: web::Data<Config>,
db_pool: web::Data<Pool>,
form: web::Json<UserLoginData>,
session: Session,
) -> Result<HttpResponse, HttpError> {
let db_client = &**get_database_client(&db_pool).await?;
let user = get_user_by_wallet_address(db_client, &form.wallet_address).await?;
let result = verify_password(&user.password_hash, &form.signature)
.map_err(|_| ValidationError("incorrect password"))?;
if !result {
// Invalid signature/password
Err(ValidationError("incorrect password"))?;
}
session.set("id", &user.id)?;
let api_user = ApiUser::from_user(user, &config.instance_url());
Ok(HttpResponse::Ok().json(api_user))
}
#[get("/api/v0/current-user")]
async fn current_user_view(
config: web::Data<Config>,
db_pool: web::Data<Pool>,
session: Session,
) -> Result<HttpResponse, HttpError> {
let db_client = &**get_database_client(&db_pool).await?;
let user = get_current_user(db_client, session).await?;
let api_user = ApiUser::from_user(user, &config.instance_url());
Ok(HttpResponse::Ok().json(api_user))
}
#[post("/api/v0/logout")]
async fn logout_view(
session: Session,
) -> Result<HttpResponse, HttpError> {
session.clear();
Ok(HttpResponse::Ok().body("logged out"))
}

View file

@ -0,0 +1,2 @@
pub mod queries;
pub mod types;

View file

@ -0,0 +1,24 @@
use tokio_postgres::GenericClient;
use uuid::Uuid;
use crate::errors::DatabaseError;
use super::types::DbMediaAttachment;
pub async fn create_attachment(
db_client: &impl GenericClient,
owner_id: &Uuid,
media_type: Option<String>,
file_name: String,
) -> Result<DbMediaAttachment, DatabaseError> {
let attachment_id = Uuid::new_v4();
let inserted_row = db_client.query_one(
"
INSERT INTO media_attachment (id, owner_id, media_type, file_name)
VALUES ($1, $2, $3, $4)
RETURNING media_attachment
",
&[&attachment_id, &owner_id, &media_type, &file_name],
).await?;
let db_attachment: DbMediaAttachment = inserted_row.try_get("media_attachment")?;
Ok(db_attachment)
}

View file

@ -0,0 +1,34 @@
use chrono::{DateTime, Utc};
use postgres_types::FromSql;
use uuid::Uuid;
#[derive(Clone, FromSql)]
#[postgres(name = "media_attachment")]
pub struct DbMediaAttachment {
pub id: Uuid,
pub owner_id: Uuid,
pub media_type: Option<String>,
pub file_name: String,
pub post_id: Option<Uuid>,
pub created_at: DateTime<Utc>,
}
pub enum AttachmentType {
Unknown,
Image,
}
impl AttachmentType {
pub fn from_media_type(value: Option<String>) -> Self {
match value {
Some(media_type) => {
if media_type.starts_with("image/") {
Self::Image
} else {
Self::Unknown
}
},
None => Self::Unknown,
}
}
}
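// A small test sketch for the media type mapping above, using placeholder media types.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_attachment_type_from_media_type() {
        let image = AttachmentType::from_media_type(Some("image/png".to_string()));
        assert!(matches!(image, AttachmentType::Image));
        let unknown = AttachmentType::from_media_type(None);
        assert!(matches!(unknown, AttachmentType::Unknown));
    }
}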

5
src/models/mod.rs Normal file
View file

@ -0,0 +1,5 @@
pub mod attachments;
pub mod posts;
pub mod profiles;
pub mod relationships;
pub mod users;

2
src/models/posts/mod.rs Normal file
View file

@ -0,0 +1,2 @@
pub mod queries;
pub mod types;

207
src/models/posts/queries.rs Normal file
View file

@ -0,0 +1,207 @@
use std::convert::TryFrom;
use chrono::Utc;
use tokio_postgres::GenericClient;
use uuid::Uuid;
use crate::errors::DatabaseError;
use crate::models::attachments::types::DbMediaAttachment;
use crate::models::profiles::queries::update_post_count;
use super::types::{DbPost, Post, PostCreateData};
pub async fn get_posts(
db_client: &impl GenericClient,
current_user_id: &Uuid,
) -> Result<Vec<Post>, DatabaseError> {
// Select posts from follows + own posts
let rows = db_client.query(
"
SELECT
post, actor_profile,
ARRAY(
SELECT media_attachment
FROM media_attachment WHERE post_id = post.id
) AS attachments
FROM post
JOIN actor_profile ON post.author_id = actor_profile.id
WHERE
post.author_id = $1
OR EXISTS (
SELECT 1 FROM relationship
WHERE source_id = $1 AND target_id = post.author_id
)
ORDER BY post.created_at DESC
",
&[&current_user_id],
).await?;
let posts: Vec<Post> = rows.iter()
.map(|row| Post::try_from(row))
.collect::<Result<_, _>>()?;
Ok(posts)
}
pub async fn get_posts_by_author(
db_client: &impl GenericClient,
account_id: &Uuid,
) -> Result<Vec<Post>, DatabaseError> {
let rows = db_client.query(
"
SELECT
post, actor_profile,
ARRAY(
SELECT media_attachment
FROM media_attachment WHERE post_id = post.id
) AS attachments
FROM post
JOIN actor_profile ON post.author_id = actor_profile.id
WHERE
post.author_id = $1
ORDER BY post.created_at DESC
",
&[&account_id],
).await?;
let posts: Vec<Post> = rows.iter()
.map(|row| Post::try_from(row))
.collect::<Result<_, _>>()?;
Ok(posts)
}
pub async fn create_post(
db_client: &mut impl GenericClient,
author_id: &Uuid,
data: PostCreateData,
) -> Result<Post, DatabaseError> {
let transaction = db_client.transaction().await?;
let post_id = uuid::Uuid::new_v4();
let created_at = data.created_at.unwrap_or_else(Utc::now);
let post_row = transaction.query_one(
"
INSERT INTO post (id, author_id, content, created_at)
VALUES ($1, $2, $3, $4)
RETURNING post
",
&[&post_id, &author_id, &data.content, &created_at],
).await?;
let attachment_rows = transaction.query(
"
UPDATE media_attachment
SET post_id = $1
WHERE id = ANY($2)
RETURNING media_attachment
",
&[&post_id, &data.attachments],
).await?;
let db_attachments: Vec<DbMediaAttachment> = attachment_rows.iter()
.map(|row| -> Result<DbMediaAttachment, tokio_postgres::Error> {
row.try_get("media_attachment")
})
.collect::<Result<_, _>>()?;
let db_post: DbPost = post_row.try_get("post")?;
let author = update_post_count(&transaction, &db_post.author_id, 1).await?;
transaction.commit().await?;
let post = Post {
id: db_post.id,
author,
content: db_post.content,
attachments: db_attachments,
ipfs_cid: db_post.ipfs_cid,
token_id: db_post.token_id,
token_tx_id: db_post.token_tx_id,
created_at: db_post.created_at,
};
Ok(post)
}
pub async fn get_post_by_id(
db_client: &impl GenericClient,
post_id: &Uuid,
) -> Result<Post, DatabaseError> {
let maybe_row = db_client.query_opt(
"
SELECT
post, actor_profile,
ARRAY(
SELECT media_attachment
FROM media_attachment WHERE post_id = post.id
) AS attachments
FROM post
JOIN actor_profile ON post.author_id = actor_profile.id
WHERE post.id = $1
",
&[&post_id],
).await?;
let post = match maybe_row {
Some(row) => Post::try_from(&row)?,
None => return Err(DatabaseError::NotFound("post")),
};
Ok(post)
}
pub async fn get_post_by_ipfs_cid(
db_client: &impl GenericClient,
ipfs_cid: &str,
) -> Result<Post, DatabaseError> {
let result = db_client.query_opt(
"
SELECT
post, actor_profile,
ARRAY(
SELECT media_attachment
FROM media_attachment WHERE post_id = post.id
) AS attachments
FROM post
JOIN actor_profile ON post.author_id = actor_profile.id
WHERE post.ipfs_cid = $1
",
&[&ipfs_cid],
).await?;
let post = match result {
Some(row) => Post::try_from(&row)?,
None => return Err(DatabaseError::NotFound("post")),
};
Ok(post)
}
pub async fn update_post(
db_client: &impl GenericClient,
post: &Post,
) -> Result<(), DatabaseError> {
// TODO: create PostUpdateData type
let updated_count = db_client.execute(
"
UPDATE post
SET
content = $1,
ipfs_cid = $2,
token_id = $3,
token_tx_id = $4
WHERE id = $5
",
&[
&post.content,
&post.ipfs_cid,
&post.token_id,
&post.token_tx_id,
&post.id,
],
).await?;
if updated_count == 0 {
return Err(DatabaseError::NotFound("post"));
}
Ok(())
}
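// Returns true if at least one post has an IPFS CID but no token ID yet.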
pub async fn is_waiting_for_token(
db_client: &impl GenericClient,
) -> Result<bool, DatabaseError> {
let row = db_client.query_one(
"
SELECT count(post) > 0 AS is_waiting
FROM post
WHERE ipfs_cid IS NOT NULL AND token_id IS NULL
",
&[],
).await?;
let is_waiting: bool = row.try_get("is_waiting")?;
Ok(is_waiting)
}

102
src/models/posts/types.rs Normal file
View file

@ -0,0 +1,102 @@
use std::convert::TryFrom;
use chrono::{DateTime, Utc};
use postgres_types::FromSql;
use tokio_postgres::Row;
use uuid::Uuid;
use crate::errors::ValidationError;
use crate::models::attachments::types::DbMediaAttachment;
use crate::models::profiles::types::DbActorProfile;
use crate::utils::html::clean_html;
#[derive(FromSql)]
#[postgres(name = "post")]
pub struct DbPost {
pub id: Uuid,
pub author_id: Uuid,
pub content: String,
pub ipfs_cid: Option<String>,
pub token_id: Option<i32>,
pub token_tx_id: Option<String>,
pub created_at: DateTime<Utc>,
}
#[derive(Clone)]
pub struct Post {
pub id: Uuid,
pub author: DbActorProfile,
pub content: String,
pub attachments: Vec<DbMediaAttachment>,
pub ipfs_cid: Option<String>,
pub token_id: Option<i32>,
pub token_tx_id: Option<String>,
pub created_at: DateTime<Utc>,
}
impl TryFrom<&Row> for Post {
type Error = tokio_postgres::Error;
fn try_from(row: &Row) -> Result<Self, Self::Error> {
let db_post: DbPost = row.try_get("post")?;
let db_profile: DbActorProfile = row.try_get("actor_profile")?;
let db_attachments: Vec<DbMediaAttachment> = row.try_get("attachments")?;
let post = Post {
id: db_post.id,
author: db_profile,
content: db_post.content,
attachments: db_attachments,
ipfs_cid: db_post.ipfs_cid,
token_id: db_post.token_id,
token_tx_id: db_post.token_tx_id,
created_at: db_post.created_at,
};
Ok(post)
}
}
pub struct PostCreateData {
pub content: String,
pub attachments: Vec<Uuid>,
pub created_at: Option<DateTime<Utc>>,
}
impl PostCreateData {
/// Validate and clean post data.
pub fn validate(&mut self) -> Result<(), ValidationError> {
let content_safe = clean_html(&self.content);
let content_trimmed = content_safe.trim();
if content_trimmed.is_empty() {
return Err(ValidationError("post cannot be empty"));
}
self.content = content_trimmed.to_string();
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_validate_post_data() {
let mut post_data_1 = PostCreateData {
content: " ".to_string(),
attachments: vec![],
created_at: None,
};
assert!(post_data_1.validate().is_err());
}
#[test]
fn test_trimming() {
let mut post_data_2 = PostCreateData {
content: "test ".to_string(),
attachments: vec![],
created_at: None,
};
assert!(post_data_2.validate().is_ok());
assert_eq!(post_data_2.content.as_str(), "test");
}
}

View file

@ -0,0 +1,2 @@
pub mod queries;
pub mod types;

View file

@ -0,0 +1,272 @@
use tokio_postgres::GenericClient;
use uuid::Uuid;
use crate::errors::DatabaseError;
use super::types::{DbActorProfile, ProfileCreateData, ProfileUpdateData};
/// Create new profile using given Client or Transaction.
pub async fn create_profile(
db_client: &impl GenericClient,
profile_data: &ProfileCreateData,
) -> Result<DbActorProfile, DatabaseError> {
let profile_id = Uuid::new_v4();
let result = db_client.query_one(
"
INSERT INTO actor_profile (
id, username, display_name, acct, bio, bio_source,
avatar_file_name, banner_file_name,
actor_json
)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
RETURNING actor_profile
",
&[
&profile_id,
&profile_data.username,
&profile_data.display_name,
&profile_data.acct,
&profile_data.bio,
&profile_data.bio, // bio_source is initially the same as bio
&profile_data.avatar,
&profile_data.banner,
&profile_data.actor,
],
).await;
let profile = match result {
Ok(row) => row.try_get("actor_profile")?,
Err(err) => {
// TODO: catch profile already exists error
log::warn!("{}", err);
return Err(DatabaseError::AlreadyExists("profile"));
},
};
Ok(profile)
}
pub async fn update_profile(
db_client: &impl GenericClient,
profile_id: &Uuid,
data: ProfileUpdateData,
) -> Result<DbActorProfile, DatabaseError> {
let maybe_row = db_client.query_opt(
"
UPDATE actor_profile
SET
display_name = $1,
bio = $2,
bio_source = $3,
avatar_file_name = $4,
banner_file_name = $5
WHERE id = $6
RETURNING actor_profile
",
&[
&data.display_name,
&data.bio,
&data.bio_source,
&data.avatar,
&data.banner,
&profile_id,
],
).await?;
let profile = match maybe_row {
Some(row) => row.try_get("actor_profile")?,
None => return Err(DatabaseError::NotFound("profile")),
};
Ok(profile)
}
pub async fn get_profile_by_id(
db_client: &impl GenericClient,
profile_id: &Uuid,
) -> Result<DbActorProfile, DatabaseError> {
let result = db_client.query_opt(
"
SELECT actor_profile
FROM actor_profile
WHERE id = $1
",
&[&profile_id],
).await?;
let profile = match result {
Some(row) => row.try_get("actor_profile")?,
None => return Err(DatabaseError::NotFound("profile")),
};
Ok(profile)
}
pub async fn get_profile_by_actor_id(
db_client: &impl GenericClient,
actor_id: &str,
) -> Result<DbActorProfile, DatabaseError> {
let result = db_client.query_opt(
"
SELECT actor_profile
FROM actor_profile
WHERE actor_profile.actor_json ->> 'id' = $1
",
&[&actor_id],
).await?;
let profile = match result {
Some(row) => row.try_get("actor_profile")?,
None => return Err(DatabaseError::NotFound("profile")),
};
Ok(profile)
}
pub async fn get_profile_by_acct(
db_client: &impl GenericClient,
account_uri: &str,
) -> Result<DbActorProfile, DatabaseError> {
let result = db_client.query_opt(
"
SELECT actor_profile
FROM actor_profile
WHERE actor_profile.acct = $1
",
&[&account_uri],
).await?;
let profile = match result {
Some(row) => row.try_get("actor_profile")?,
None => return Err(DatabaseError::NotFound("profile")),
};
Ok(profile)
}
pub async fn get_profiles(
db_client: &impl GenericClient,
) -> Result<Vec<DbActorProfile>, DatabaseError> {
let rows = db_client.query(
"
SELECT actor_profile
FROM actor_profile
ORDER BY username
",
&[],
).await?;
let profiles = rows.iter()
.map(|row| row.try_get("actor_profile"))
.collect::<Result<Vec<DbActorProfile>, _>>()?;
Ok(profiles)
}
pub async fn get_followers(
db_client: &impl GenericClient,
profile_id: &Uuid,
) -> Result<Vec<DbActorProfile>, DatabaseError> {
let rows = db_client.query(
"
SELECT actor_profile
FROM actor_profile
JOIN relationship
ON (actor_profile.id = relationship.source_id)
WHERE relationship.target_id = $1
",
&[&profile_id],
).await?;
let profiles = rows.iter()
.map(|row| row.try_get("actor_profile"))
.collect::<Result<Vec<DbActorProfile>, _>>()?;
Ok(profiles)
}
pub async fn delete_profile(
db_client: &impl GenericClient,
profile_id: &Uuid,
) -> Result<(), DatabaseError> {
let deleted_count = db_client.execute(
"DELETE FROM actor_profile WHERE id = $1",
&[&profile_id],
).await?;
if deleted_count == 0 {
return Err(DatabaseError::NotFound("profile"));
}
Ok(())
}
pub async fn search_profile(
db_client: &impl GenericClient,
username: &String,
instance: &Option<String>,
) -> Result<Vec<DbActorProfile>, DatabaseError> {
let db_search_query = match &instance {
Some(instance) => {
// Search for exact profile name.
// Fetch from remote server if not found
format!("{}@{}", username, instance)
},
None => {
// Search for username
format!("%{}%", username)
},
};
let rows = db_client.query(
"
SELECT actor_profile
FROM actor_profile
WHERE acct LIKE $1
",
&[&db_search_query],
).await?;
let profiles: Vec<DbActorProfile> = rows.iter()
.map(|row| row.try_get("actor_profile"))
.collect::<Result<_, _>>()?;
Ok(profiles)
}
pub async fn update_follower_count(
db_client: &impl GenericClient,
profile_id: &Uuid,
change: i32,
) -> Result<DbActorProfile, DatabaseError> {
let maybe_row = db_client.query_opt(
"
UPDATE actor_profile
SET follower_count = follower_count + $1
WHERE id = $2
RETURNING actor_profile
",
&[&change, &profile_id],
).await?;
let row = maybe_row.ok_or(DatabaseError::NotFound("profile"))?;
let profile = row.try_get("actor_profile")?;
Ok(profile)
}
pub async fn update_following_count(
db_client: &impl GenericClient,
profile_id: &Uuid,
change: i32,
) -> Result<DbActorProfile, DatabaseError> {
let maybe_row = db_client.query_opt(
"
UPDATE actor_profile
SET following_count = following_count + $1
WHERE id = $2
RETURNING actor_profile
",
&[&change, &profile_id],
).await?;
let row = maybe_row.ok_or(DatabaseError::NotFound("profile"))?;
let profile = row.try_get("actor_profile")?;
Ok(profile)
}
pub async fn update_post_count(
db_client: &impl GenericClient,
profile_id: &Uuid,
change: i32,
) -> Result<DbActorProfile, DatabaseError> {
let maybe_row = db_client.query_opt(
"
UPDATE actor_profile
SET post_count = post_count + $1
WHERE id = $2
RETURNING actor_profile
",
&[&change, &profile_id],
).await?;
let row = maybe_row.ok_or(DatabaseError::NotFound("profile"))?;
let profile = row.try_get("actor_profile")?;
Ok(profile)
}

View file

@ -0,0 +1,51 @@
use chrono::{DateTime, Utc};
use postgres_types::FromSql;
use serde_json::Value;
use uuid::Uuid;
use crate::errors::ValidationError;
use crate::utils::html::clean_html;
#[derive(Clone, FromSql)]
#[postgres(name = "actor_profile")]
pub struct DbActorProfile {
pub id: Uuid,
pub username: String,
pub acct: String,
pub display_name: Option<String>,
pub bio: Option<String>, // html
pub bio_source: Option<String>, // plaintext or markdown
pub avatar_file_name: Option<String>,
pub banner_file_name: Option<String>,
pub follower_count: i32,
pub following_count: i32,
pub post_count: i32,
pub created_at: DateTime<Utc>,
pub actor_json: Option<Value>,
}
pub struct ProfileCreateData {
pub username: String,
pub display_name: Option<String>,
pub acct: String,
pub bio: Option<String>,
pub avatar: Option<String>,
pub banner: Option<String>,
pub actor: Option<Value>,
}
pub struct ProfileUpdateData {
pub display_name: Option<String>,
pub bio: Option<String>,
pub bio_source: Option<String>,
pub avatar: Option<String>,
pub banner: Option<String>,
}
impl ProfileUpdateData {
/// Validate and clean bio.
pub fn clean(&mut self) -> Result<(), ValidationError> {
self.bio = self.bio.as_ref().map(|val| clean_html(val));
Ok(())
}
}

View file

@ -0,0 +1,2 @@
pub mod queries;
pub mod types;

View file

@ -0,0 +1,230 @@
use std::convert::TryFrom;
use tokio_postgres::GenericClient;
use uuid::Uuid;
use crate::errors::DatabaseError;
use crate::models::profiles::queries::{
update_follower_count,
update_following_count,
};
use super::types::{
DbFollowRequest,
FollowRequest,
FollowRequestStatus,
Relationship,
};
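// Computes Mastodon-style relationship flags (following, followed_by, requested)
// between the source profile and each of the target profiles.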
pub async fn get_relationships(
db_client: &impl GenericClient,
source_id: Uuid,
target_ids: Vec<Uuid>,
) -> Result<Vec<Relationship>, DatabaseError> {
let rows = db_client.query(
"
SELECT
actor_profile.id,
EXISTS (
SELECT 1 FROM relationship
WHERE source_id = $1 AND target_id = actor_profile.id
) AS following,
EXISTS (
SELECT 1 FROM relationship
WHERE source_id = actor_profile.id AND target_id = $1
) AS followed_by,
EXISTS (
SELECT 1 FROM follow_request
WHERE source_id = $1 AND target_id = actor_profile.id
AND request_status = 1
) AS requested
FROM actor_profile
WHERE actor_profile.id = ANY($2)
",
&[&source_id, &target_ids],
).await?;
let relationships = rows.iter()
.map(|row| Relationship::try_from(row))
.collect::<Result<_, _>>()?;
Ok(relationships)
}
pub async fn get_relationship(
db_client: &impl GenericClient,
source_id: &Uuid,
target_id: &Uuid,
) -> Result<Relationship, DatabaseError> {
let maybe_row = db_client.query_opt(
"
SELECT
actor_profile.id,
EXISTS (
SELECT 1 FROM relationship
WHERE source_id = $1 AND target_id = actor_profile.id
) AS following,
EXISTS (
SELECT 1 FROM relationship
WHERE source_id = actor_profile.id AND target_id = $1
) AS followed_by,
EXISTS (
SELECT 1 FROM follow_request
WHERE source_id = $1 AND target_id = actor_profile.id
AND request_status = 1
) AS requested
FROM actor_profile
WHERE actor_profile.id = $2
",
&[&source_id, &target_id],
).await?;
let row = maybe_row.ok_or(DatabaseError::NotFound("profile"))?;
let relationship = Relationship::try_from(&row)?;
Ok(relationship)
}
pub async fn follow(
db_client: &mut impl GenericClient,
source_id: &Uuid,
target_id: &Uuid,
) -> Result<Relationship, DatabaseError> {
let transaction = db_client.transaction().await?;
let result = transaction.execute(
"
INSERT INTO relationship (source_id, target_id)
VALUES ($1, $2)
",
&[&source_id, &target_id],
).await;
if let Err(err) = result {
log::info!("{}", err);
return Err(DatabaseError::AlreadyExists("relationship"));
};
update_follower_count(&transaction, target_id, 1).await?;
update_following_count(&transaction, source_id, 1).await?;
let relationship = get_relationship(&transaction, source_id, target_id).await?;
transaction.commit().await?;
Ok(relationship)
}
pub async fn unfollow(
db_client: &mut impl GenericClient,
source_id: &Uuid,
target_id: &Uuid,
) -> Result<Relationship, DatabaseError> {
let transaction = db_client.transaction().await?;
let deleted_count = transaction.execute(
"
DELETE FROM relationship
WHERE source_id = $1 AND target_id = $2
",
&[&source_id, &target_id],
).await?;
if deleted_count == 0 {
// Relationship not found, try to delete follow request
let follow_request_deleted = delete_follow_request(
&transaction,
source_id,
target_id,
).await?;
if !follow_request_deleted {
return Err(DatabaseError::NotFound("relationship"));
}
} else {
// Update counters only if relationship exists
update_follower_count(&transaction, target_id, -1).await?;
update_following_count(&transaction, source_id, -1).await?;
}
let relationship = get_relationship(&transaction, source_id, target_id).await?;
transaction.commit().await?;
Ok(relationship)
}
pub async fn create_follow_request(
db_client: &impl GenericClient,
source_id: &Uuid,
target_id: &Uuid,
) -> Result<FollowRequest, DatabaseError> {
let request = FollowRequest {
id: Uuid::new_v4(),
source_id: source_id.to_owned(),
target_id: target_id.to_owned(),
status: FollowRequestStatus::Pending,
};
db_client.execute(
"
INSERT INTO follow_request (
id, source_id, target_id, request_status
)
VALUES ($1, $2, $3, $4)
",
&[
&request.id,
&request.source_id,
&request.target_id,
&i16::from(request.status.clone()),
],
).await?;
Ok(request)
}
pub async fn accept_follow_request(
db_client: &mut impl GenericClient,
request_id: &Uuid,
) -> Result<(), DatabaseError> {
let mut transaction = db_client.transaction().await?;
let status_sql = i16::from(FollowRequestStatus::Accepted);
let maybe_row = transaction.query_opt(
"
UPDATE follow_request
SET request_status = $1
WHERE id = $2
RETURNING source_id, target_id
",
&[&status_sql, &request_id],
).await?;
let row = maybe_row.ok_or(DatabaseError::NotFound("follow request"))?;
let source_id: Uuid = row.try_get("source_id")?;
let target_id: Uuid = row.try_get("target_id")?;
follow(&mut transaction, &source_id, &target_id).await?;
transaction.commit().await?;
Ok(())
}
pub async fn delete_follow_request(
db_client: &impl GenericClient,
source_id: &Uuid,
target_id: &Uuid,
) -> Result<bool, DatabaseError> {
let deleted_count = db_client.execute(
"
DELETE FROM follow_request
WHERE source_id = $1 AND target_id = $2
",
&[&source_id, &target_id],
).await?;
let is_success = deleted_count > 0;
Ok(is_success)
}
pub async fn get_follow_request_by_path(
db_client: &impl GenericClient,
source_id: &Uuid,
target_id: &Uuid,
) -> Result<FollowRequest, DatabaseError> {
let maybe_row = db_client.query_opt(
"
SELECT follow_request
FROM follow_request
WHERE source_id = $1 AND target_id = $2
",
&[&source_id, &target_id],
).await?;
let row = maybe_row.ok_or(DatabaseError::NotFound("follow request"))?;
let db_request: DbFollowRequest = row.try_get("follow_request")?;
let request_status = FollowRequestStatus::try_from(db_request.request_status)?;
let request = FollowRequest {
id: db_request.id,
source_id: db_request.source_id,
target_id: db_request.target_id,
status: request_status,
};
Ok(request)
}

View file

@ -0,0 +1,78 @@
use std::convert::TryFrom;
use postgres_types::FromSql;
use serde::Serialize;
use tokio_postgres::Row;
use uuid::Uuid;
use crate::errors::ConversionError;
#[derive(Serialize)]
pub struct Relationship {
pub id: Uuid,
pub following: bool,
pub followed_by: bool,
pub requested: bool,
}
impl TryFrom<&Row> for Relationship {
type Error = tokio_postgres::Error;
fn try_from(row: &Row) -> Result<Self, Self::Error> {
let relationship = Relationship {
id: row.try_get("id")?,
following: row.try_get("following")?,
followed_by: row.try_get("followed_by")?,
requested: row.try_get("requested")?,
};
Ok(relationship)
}
}
#[derive(Clone, PartialEq)]
pub enum FollowRequestStatus {
Pending,
Accepted,
Rejected,
}
impl From<FollowRequestStatus> for i16 {
fn from(value: FollowRequestStatus) -> i16 {
match value {
FollowRequestStatus::Pending => 1,
FollowRequestStatus::Accepted => 2,
FollowRequestStatus::Rejected => 3,
}
}
}
impl TryFrom<i16> for FollowRequestStatus {
type Error = ConversionError;
fn try_from(value: i16) -> Result<Self, Self::Error> {
let status = match value {
1 => Self::Pending,
2 => Self::Accepted,
3 => Self::Rejected,
_ => return Err(ConversionError),
};
Ok(status)
}
}
#[derive(FromSql)]
#[postgres(name = "follow_request")]
pub struct DbFollowRequest {
pub id: Uuid,
pub source_id: Uuid,
pub target_id: Uuid,
pub request_status: i16,
}
pub struct FollowRequest {
pub id: Uuid,
pub source_id: Uuid,
pub target_id: Uuid,
pub status: FollowRequestStatus,
}

2
src/models/users/mod.rs Normal file
View file

@ -0,0 +1,2 @@
pub mod queries;
pub mod types;

212
src/models/users/queries.rs Normal file
View file

@ -0,0 +1,212 @@
use tokio_postgres::GenericClient;
use uuid::Uuid;
use crate::errors::DatabaseError;
use crate::models::profiles::queries::create_profile;
use crate::models::profiles::types::{DbActorProfile, ProfileCreateData};
use crate::utils::crypto::generate_random_string;
use super::types::{DbUser, User, UserRegistrationData};
pub async fn generate_invite_code(
db_client: &impl GenericClient,
) -> Result<String, DatabaseError> {
let invite_code = generate_random_string();
db_client.execute(
"
INSERT INTO user_invite_code (code)
VALUES ($1)
",
&[&invite_code],
).await?;
Ok(invite_code)
}
pub async fn get_invite_codes(
db_client: &impl GenericClient,
) -> Result<Vec<String>, DatabaseError> {
let rows = db_client.query(
"
SELECT code
FROM user_invite_code
WHERE used = FALSE
",
&[],
).await?;
let codes: Vec<String> = rows.iter()
.map(|row| row.try_get("code"))
.collect::<Result<_, _>>()?;
Ok(codes)
}
pub async fn is_valid_invite_code(
db_client: &impl GenericClient,
invite_code: &str,
) -> Result<bool, DatabaseError> {
let maybe_row = db_client.query_opt(
"
SELECT 1 FROM user_invite_code
WHERE code = $1 AND used = FALSE
",
&[&invite_code],
).await?;
Ok(maybe_row.is_some())
}
pub async fn get_user_by_id(
db_client: &impl GenericClient,
user_id: &Uuid,
) -> Result<User, DatabaseError> {
let maybe_row = db_client.query_opt(
"
SELECT user_account, actor_profile
FROM user_account JOIN actor_profile USING (id)
WHERE id = $1
",
&[&user_id],
).await?;
let row = maybe_row.ok_or(DatabaseError::NotFound("user"))?;
let db_user: DbUser = row.try_get("user_account")?;
let db_profile: DbActorProfile = row.try_get("actor_profile")?;
let user = User {
id: db_user.id,
wallet_address: db_user.wallet_address,
password_hash: db_user.password_hash,
private_key: db_user.private_key,
profile: db_profile,
};
Ok(user)
}
pub async fn get_user_by_name(
db_client: &impl GenericClient,
username: &str,
) -> Result<User, DatabaseError> {
let maybe_row = db_client.query_opt(
"
SELECT user_account, actor_profile
FROM user_account JOIN actor_profile USING (id)
WHERE actor_profile.username = $1
",
&[&username],
).await?;
let row = maybe_row.ok_or(DatabaseError::NotFound("user"))?;
let db_user: DbUser = row.try_get("user_account")?;
let db_profile: DbActorProfile = row.try_get("actor_profile")?;
let user = User {
id: db_user.id,
wallet_address: db_user.wallet_address,
password_hash: db_user.password_hash,
private_key: db_user.private_key,
profile: db_profile,
};
Ok(user)
}
pub async fn is_registered_user(
db_client: &impl GenericClient,
username: &str,
) -> Result<bool, DatabaseError> {
let maybe_row = db_client.query_opt(
"
SELECT 1 FROM user_account JOIN actor_profile USING (id)
WHERE actor_profile.username = $1
",
&[&username],
).await?;
Ok(maybe_row.is_some())
}
pub async fn create_user(
db_client: &mut impl GenericClient,
form: UserRegistrationData,
password_hash: String,
private_key_pem: String,
) -> Result<User, DatabaseError> {
let transaction = db_client.transaction().await?;
// Use invite code
if let Some(ref invite_code) = form.invite_code {
let updated_count = transaction.execute(
"
UPDATE user_invite_code
SET used = TRUE
WHERE code = $1 AND used = FALSE
",
&[&invite_code],
).await?;
if updated_count == 0 {
Err(DatabaseError::NotFound("invite code"))?;
}
}
// Create profile
let profile_data = ProfileCreateData {
username: form.username.clone(),
display_name: None,
acct: form.username.clone(),
bio: None,
avatar: None,
banner: None,
actor: None,
};
let profile = create_profile(&transaction, &profile_data).await?;
// Create user
let result = transaction.query_one(
"
INSERT INTO user_account (
id, wallet_address, password_hash, private_key, invite_code
)
VALUES ($1, $2, $3, $4, $5)
RETURNING user_account
",
&[
&profile.id,
&form.wallet_address,
&password_hash,
&private_key_pem,
&form.invite_code,
],
).await;
match result {
Ok(row) => {
transaction.commit().await?;
let db_user: DbUser = row.try_get("user_account")?;
let user = User {
id: db_user.id,
wallet_address: db_user.wallet_address,
password_hash: db_user.password_hash,
private_key: db_user.private_key,
profile,
};
Ok(user)
},
Err(err) => {
// TODO: catch user already exists error
log::info!("{}", err);
Err(DatabaseError::AlreadyExists("user"))?
},
}
}
pub async fn get_user_by_wallet_address(
db_client: &impl GenericClient,
wallet_address: &str,
) -> Result<User, DatabaseError> {
let maybe_row = db_client.query_opt(
"
SELECT user_account, actor_profile
FROM user_account JOIN actor_profile USING (id)
WHERE wallet_address = $1
",
&[&wallet_address],
).await?;
let row = maybe_row.ok_or(DatabaseError::NotFound("user"))?;
let db_user: DbUser = row.try_get("user_account")?;
let db_profile: DbActorProfile = row.try_get("actor_profile")?;
let user = User {
id: db_user.id,
wallet_address: db_user.wallet_address,
password_hash: db_user.password_hash,
private_key: db_user.private_key,
profile: db_profile,
};
Ok(user)
}

72
src/models/users/types.rs Normal file
View file

@ -0,0 +1,72 @@
use chrono::{DateTime, Utc};
use postgres_types::FromSql;
use regex::Regex;
use serde::Deserialize;
use uuid::Uuid;
use crate::errors::ValidationError;
use crate::models::profiles::types::DbActorProfile;
#[derive(FromSql)]
#[postgres(name = "user_account")]
pub struct DbUser {
pub id: Uuid,
pub wallet_address: String,
pub password_hash: String,
pub private_key: String,
pub invite_code: Option<String>,
pub created_at: DateTime<Utc>,
}
// Represents local user
#[derive(Clone)]
pub struct User {
pub id: Uuid,
pub wallet_address: String,
pub password_hash: String,
pub private_key: String,
pub profile: DbActorProfile,
}
#[derive(Deserialize)]
pub struct UserRegistrationData {
pub username: String,
pub signature: String,
pub wallet_address: String,
pub invite_code: Option<String>,
}
fn validate_username(username: &str) -> Result<(), ValidationError> {
let username_regexp = Regex::new(r"^[a-z0-9_]+$").unwrap();
if !username_regexp.is_match(username) {
return Err(ValidationError("invalid username"));
}
Ok(())
}
impl UserRegistrationData {
/// Validate and clean.
pub fn clean(&self) -> Result<(), ValidationError> {
validate_username(&self.username)?;
Ok(())
}
}
#[derive(Deserialize)]
pub struct UserLoginData {
pub signature: String,
pub wallet_address: String,
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_validate_username() {
let result_1 = validate_username("name_1");
assert!(result_1.is_ok());
let result_2 = validate_username("name&");
assert!(result_2.is_err());
}
}

2
src/nodeinfo/mod.rs Normal file
View file

@ -0,0 +1,2 @@
mod types;
pub mod views;

68
src/nodeinfo/types.rs Normal file
View file

@ -0,0 +1,68 @@
//! http://nodeinfo.diaspora.software/schema.html
use serde::Serialize;
use crate::config::Config;
#[derive(Serialize)]
struct Software {
name: String,
version: String,
}
#[derive(Serialize)]
struct Services {
inbound: Vec<String>,
outbound: Vec<String>,
}
#[derive(Serialize)]
struct Users {
}
#[derive(Serialize)]
struct Usage {
users: Users,
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Metadata {
node_name: String,
node_description: String,
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct NodeInfo20 {
version: String,
software: Software,
protocols: Vec<String>,
services: Services,
open_registrations: bool,
usage: Usage,
metadata: Metadata,
}
impl NodeInfo20 {
pub fn new(config: &Config) -> Self {
let software = Software {
name: "mitra".to_string(),
version: config.version.clone(),
};
let services = Services { inbound: vec![], outbound: vec![] };
let metadata = Metadata {
node_name: config.instance_title.clone(),
node_description: config.instance_short_description.clone(),
};
Self {
version: "2.0".to_string(),
software,
protocols: vec!["activitypub".to_string()],
services,
open_registrations: config.registrations_open,
usage: Usage { users: Users { } },
metadata,
}
}
}

39
src/nodeinfo/views.rs Normal file
View file

@ -0,0 +1,39 @@
//! http://nodeinfo.diaspora.software/protocol.html
use actix_web::{get, web, HttpResponse};
use crate::config::Config;
use crate::errors::HttpError;
use crate::webfinger::types::{
Link,
JsonResourceDescriptor,
};
use super::types::NodeInfo20;
#[get("/.well-known/nodeinfo")]
pub async fn get_nodeinfo(
config: web::Data<Config>,
) -> Result<HttpResponse, HttpError> {
let nodeinfo_2_0_url = format!("{}/nodeinfo/2.0", config.instance_url());
let link = Link {
rel: "http://nodeinfo.diaspora.software/ns/schema/2.0".to_string(),
link_type: None,
href: Some(nodeinfo_2_0_url),
};
let jrd = JsonResourceDescriptor {
subject: config.instance_url(),
links: vec![link],
};
let response = HttpResponse::Ok().json(jrd);
Ok(response)
}
#[get("/nodeinfo/2.0")]
pub async fn get_nodeinfo_2_0(
config: web::Data<Config>,
) -> Result<HttpResponse, HttpError> {
let nodeinfo = NodeInfo20::new(&config);
let response = HttpResponse::Ok().json(nodeinfo);
Ok(response)
}

24
src/scheduler.rs Normal file
View file

@ -0,0 +1,24 @@
use std::time::Duration;
use crate::config::Config;
use crate::database::Pool;
use crate::ethereum::nft::{get_nft_contract, process_events};
pub fn run(config: Config, db_pool: Pool) -> () {
actix_rt::spawn(async move {
let mut interval = actix_rt::time::interval(Duration::from_secs(30));
// Verify config and create contract interface
let web3_contract = get_nft_contract(&config).await
.map_err(|err| log::error!("{}", err))
.ok();
loop {
interval.tick().await;
// Process events only if contract is properly configured
if let Some((web3, contract)) = web3_contract.as_ref() {
process_events(web3, contract, &db_pool).await.unwrap_or_else(|err| {
log::error!("{}", err);
});
}
}
});
}

115
src/utils/crypto.rs Normal file
View file

@ -0,0 +1,115 @@
use rand;
use rand::prelude::*;
use rsa::{Hash, PaddingScheme, PublicKey, RsaPrivateKey, RsaPublicKey};
use rsa::pkcs8::{FromPrivateKey, FromPublicKey, ToPrivateKey, ToPublicKey};
use sha2::{Digest, Sha256};
pub fn generate_random_string() -> String {
let mut rng = rand::thread_rng();
let value: [u8; 16] = rng.gen();
hex::encode(value)
}
pub fn hash_password(password: &str) -> Result<String, argon2::Error> {
let mut rng = rand::thread_rng();
let salt: [u8; 32] = rng.gen();
let config = argon2::Config::default();
argon2::hash_encoded(password.as_bytes(), &salt, &config)
}
pub fn verify_password(
password_hash: &str,
password: &str,
) -> Result<bool, argon2::Error> {
argon2::verify_encoded(password_hash, password.as_bytes())
}
pub fn generate_private_key() -> Result<RsaPrivateKey, rsa::errors::Error> {
let mut rng = rand::rngs::OsRng;
let bits = 2048;
RsaPrivateKey::new(&mut rng, bits)
}
pub fn serialize_private_key(
private_key: RsaPrivateKey,
) -> Result<String, rsa::pkcs8::Error> {
private_key.to_pkcs8_pem().map(|val| val.to_string())
}
pub fn deserialize_private_key(
private_key_pem: &str,
) -> Result<RsaPrivateKey, rsa::pkcs8::Error> {
RsaPrivateKey::from_pkcs8_pem(private_key_pem)
}
pub fn get_public_key_pem(
private_key: &RsaPrivateKey,
) -> Result<String, rsa::pkcs8::Error> {
let public_key = RsaPublicKey::from(private_key);
public_key.to_public_key_pem()
}
pub fn deserialize_public_key(
public_key_pem: &str,
) -> Result<RsaPublicKey, rsa::pkcs8::Error> {
RsaPublicKey::from_public_key_pem(public_key_pem.trim())
}
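// Signs a message with RSASSA-PKCS1-v1_5 over its SHA-256 digest
// and returns the signature as base64.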
pub fn sign_message(
private_key: &RsaPrivateKey,
message: &str,
) -> Result<String, rsa::errors::Error> {
let digest = Sha256::digest(message.as_bytes());
let padding = PaddingScheme::new_pkcs1v15_sign(Some(Hash::SHA2_256));
let signature = private_key.sign(padding, &digest)?;
let signature_b64 = base64::encode(&signature);
Ok(signature_b64)
}
pub fn get_message_digest(message: &str) -> String {
let digest = Sha256::digest(message.as_bytes());
let digest_b64 = base64::encode(digest);
format!("SHA-256={}", digest_b64)
}
pub fn verify_signature(
public_key: &RsaPublicKey,
message: &str,
signature_b64: &str,
) -> Result<bool, base64::DecodeError> {
let digest = Sha256::digest(message.as_bytes());
let padding = PaddingScheme::new_pkcs1v15_sign(Some(Hash::SHA2_256));
let signature = base64::decode(signature_b64)?;
let is_valid = public_key.verify(
padding,
&digest,
&signature,
).is_ok();
Ok(is_valid)
}
#[cfg(test)]
mod tests {
use rand::rngs::OsRng;
use super::*;
#[test]
fn test_public_key_serialization_deserialization() {
let private_key = RsaPrivateKey::new(&mut OsRng, 512).unwrap();
let public_key_pem = get_public_key_pem(&private_key).unwrap();
let public_key = deserialize_public_key(&public_key_pem).unwrap();
assert_eq!(public_key, RsaPublicKey::from(&private_key));
}
#[test]
fn test_verify_signature() {
let private_key = RsaPrivateKey::new(&mut OsRng, 512).unwrap();
let message = "test".to_string();
let signature = sign_message(&private_key, &message).unwrap();
let public_key = RsaPublicKey::from(&private_key);
let is_valid = verify_signature(&public_key, &message, &signature).unwrap();
assert!(is_valid);
}
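// A small sketch of the Digest header format produced by get_message_digest;
// the expected value is the base64-encoded SHA-256 hash of an empty string.
#[test]
fn test_get_message_digest() {
    let digest = get_message_digest("");
    assert_eq!(digest, "SHA-256=47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=");
}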
}

68
src/utils/files.rs Normal file
View file

@ -0,0 +1,68 @@
use std::fs::File;
use std::io::prelude::*;
use std::path::PathBuf;
use mime_guess::get_mime_extensions_str;
use mime_sniffer::MimeTypeSniffer;
use sha2::{Digest, Sha256};
#[derive(thiserror::Error, Debug)]
pub enum FileError {
#[error(transparent)]
WriteError(#[from] std::io::Error),
#[error("base64 decoding error")]
Base64DecodingError(#[from] base64::DecodeError),
#[error("invalid media type")]
InvalidMediaType,
}
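// Stores the data under a file name derived from the SHA-256 hash of its content,
// appending an extension when the media type can be sniffed.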
pub fn save_file(data: Vec<u8>, output_dir: &PathBuf) -> Result<String, FileError> {
let digest = Sha256::digest(&data);
let mut file_name = hex::encode(digest);
let maybe_extension = data.sniff_mime_type()
.and_then(|media_type| get_mime_extensions_str(media_type))
.and_then(|extensions| extensions.first());
if let Some(extension) = maybe_extension {
// Append extension for known media types
file_name = format!("{}.{}", file_name, extension);
}
let file_path = output_dir.join(&file_name);
let mut file = File::create(&file_path)?;
file.write_all(&data)?;
Ok(file_name)
}
fn sniff_media_type(data: &Vec<u8>) -> Option<String> {
data.sniff_mime_type().map(|val| val.to_string())
}
pub fn save_b64_file(
b64data: &str,
output_dir: &PathBuf,
) -> Result<(String, Option<String>), FileError> {
let data = base64::decode(b64data)?;
let media_type = sniff_media_type(&data);
let file_name = save_file(data, output_dir)?;
Ok((file_name, media_type))
}
pub fn save_validated_b64_file(
b64data: &str,
output_dir: &PathBuf,
media_type_prefix: &str,
) -> Result<(String, String), FileError> {
let data = base64::decode(b64data)?;
let media_type = sniff_media_type(&data)
.ok_or(FileError::InvalidMediaType)?;
if !media_type.starts_with(media_type_prefix) {
return Err(FileError::InvalidMediaType);
}
let file_name = save_file(data, output_dir)?;
Ok((file_name, media_type))
}
pub fn get_file_url(instance_url: &str, file_name: &str) -> String {
format!("{}/media/{}", instance_url, file_name)
}
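// A minimal test sketch for get_file_url, using a placeholder instance URL and file name.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_get_file_url() {
        let url = get_file_url("https://example.com", "image.png");
        assert_eq!(url, "https://example.com/media/image.png");
    }
}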

27
src/utils/html.rs Normal file
View file

@ -0,0 +1,27 @@
use std::collections::HashSet;
use ammonia::Builder;
pub fn clean_html(unsafe_html: &str) -> String {
let mut allowed_tags = HashSet::new();
allowed_tags.insert("a");
allowed_tags.insert("br");
let safe_html = Builder::default()
.tags(allowed_tags)
.clean(unsafe_html)
.to_string();
safe_html
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_clean_html() {
let unsafe_html = r#"<p>test <b>bold</b><script>dangerous</script> with <a href="https://example.com">link</a></p>"#;
let safe_html = clean_html(unsafe_html);
assert_eq!(safe_html, r#"test bold with <a href="https://example.com" rel="noopener noreferrer">link</a>"#);
}
}

3
src/utils/mod.rs Normal file
View file

@ -0,0 +1,3 @@
pub mod crypto;
pub mod files;
pub mod html;

2
src/webfinger/mod.rs Normal file
View file

@ -0,0 +1,2 @@
pub mod types;
pub mod views;

27
src/webfinger/types.rs Normal file
View file

@ -0,0 +1,27 @@
//! https://webfinger.net/
use serde::{Serialize, Deserialize};
pub const JRD_CONTENT_TYPE: &str = "application/jrd+json";
#[derive(Deserialize)]
pub struct WebfingerQueryParams {
pub resource: String,
}
#[derive(Serialize, Deserialize)]
pub struct Link {
pub rel: String,
#[serde(rename = "type", skip_serializing_if = "Option::is_none")]
pub link_type: Option<String>,
pub href: Option<String>,
}
// https://datatracker.ietf.org/doc/html/rfc7033#section-4.4
#[derive(Serialize, Deserialize)]
pub struct JsonResourceDescriptor {
pub subject: String,
pub links: Vec<Link>,
}

66
src/webfinger/views.rs Normal file
View file

@ -0,0 +1,66 @@
use actix_web::{get, web, HttpResponse};
use regex::Regex;
use crate::activitypub::views::get_actor_url;
use crate::activitypub::constants::ACTIVITY_CONTENT_TYPE;
use crate::config::Config;
use crate::database::{Pool, get_database_client};
use crate::errors::HttpError;
use crate::models::users::queries::is_registered_user;
use super::types::{
JRD_CONTENT_TYPE,
WebfingerQueryParams,
Link,
JsonResourceDescriptor,
};
pub async fn get_user_info(
db_pool: &Pool,
config: &Config,
query_params: WebfingerQueryParams,
) -> Result<JsonResourceDescriptor, HttpError> {
// Parse 'acct' URI
// https://datatracker.ietf.org/doc/html/rfc7565#section-7
let uri_regexp = Regex::new(r"acct:(?P<user>\w+)@(?P<instance>.+)").unwrap();
let uri_caps = uri_regexp.captures(&query_params.resource)
.ok_or(HttpError::ValidationError("invalid query target".into()))?;
let username = uri_caps.name("user")
.ok_or(HttpError::ValidationError("invalid query target".into()))?
.as_str();
let instance_uri = uri_caps.name("instance")
.ok_or(HttpError::ValidationError("invalid query target".into()))?
.as_str();
if instance_uri != config.instance_uri {
// Wrong instance URI
return Err(HttpError::NotFoundError("user"));
}
let db_client = &**get_database_client(db_pool).await?;
if !is_registered_user(db_client, &username).await? {
return Err(HttpError::NotFoundError("user"));
}
let actor_url = get_actor_url(&config.instance_url(), &username);
let link = Link {
rel: "self".to_string(),
link_type: Some(ACTIVITY_CONTENT_TYPE.to_string()),
href: Some(actor_url),
};
let jrd = JsonResourceDescriptor {
subject: query_params.resource,
links: vec![link],
};
Ok(jrd)
}
#[get("/.well-known/webfinger")]
async fn get_descriptor(
config: web::Data<Config>,
db_pool: web::Data<Pool>,
query_params: web::Query<WebfingerQueryParams>,
) -> Result<HttpResponse, HttpError> {
let jrd = get_user_info(&db_pool, &config, query_params.into_inner()).await?;
let response = HttpResponse::Ok()
.content_type(JRD_CONTENT_TYPE)
.json(jrd);
Ok(response)
}