Mirror of https://github.com/actix/actix-web.git

Merge branch 'master' into ci-semver-check-on-label

Rob Ede, 2024-08-10 03:17:23 +01:00, committed by GitHub
commit a7c5366503
188 changed files with 6408 additions and 2493 deletions

@ -1,14 +0,0 @@
[alias]
lint = "clippy --workspace --tests --examples --bins -- -Dclippy::todo"
lint-all = "clippy --workspace --all-features --tests --examples --bins -- -Dclippy::todo"
# lib checking
ci-check-min = "hack --workspace check --no-default-features"
ci-check-default = "hack --workspace check"
ci-check-default-tests = "check --workspace --tests"
ci-check-all-feature-powerset="hack --workspace --feature-powerset --skip=__compress,experimental-io-uring check"
ci-check-all-feature-powerset-linux="hack --workspace --feature-powerset --skip=__compress check"
# testing
ci-doctest-default = "test --workspace --doc --no-fail-fast -- --nocapture"
ci-doctest = "test --workspace --all-features --doc --no-fail-fast -- --nocapture"

@@ -30,49 +30,41 @@ jobs:
     steps:
       - uses: actions/checkout@v4

+      - name: Install nasm
+        if: matrix.target.os == 'windows-latest'
+        uses: ilammy/setup-nasm@v1.5.1
+
       - name: Install OpenSSL
         if: matrix.target.os == 'windows-latest'
-        run: choco install openssl -y --forcex64 --no-progress
-      - name: Set OpenSSL dir in env
-        if: matrix.target.os == 'windows-latest'
+        shell: bash
         run: |
-          echo 'OPENSSL_DIR=C:\Program Files\OpenSSL-Win64' | Out-File -FilePath $env:GITHUB_ENV -Append
-          echo 'OPENSSL_DIR=C:\Program Files\OpenSSL' | Out-File -FilePath $env:GITHUB_ENV -Append
+          set -e
+          choco install openssl --version=1.1.1.2100 -y --no-progress
+          echo 'OPENSSL_DIR=C:\Program Files\OpenSSL' >> $GITHUB_ENV
+          echo "RUSTFLAGS=-C target-feature=+crt-static" >> $GITHUB_ENV

       - name: Install Rust (${{ matrix.version.name }})
-        uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
+        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
         with:
           toolchain: ${{ matrix.version.version }}

-      - name: Install cargo-hack
-        uses: taiki-e/install-action@v2.22.0
+      - name: Install just, cargo-hack, cargo-nextest, cargo-ci-cache-clean
+        uses: taiki-e/install-action@v2.42.17
         with:
-          tool: cargo-hack
+          tool: just,cargo-hack,cargo-nextest,cargo-ci-cache-clean

       - name: check minimal
-        run: cargo ci-check-min
+        run: just check-min

       - name: check default
-        run: cargo ci-check-default
+        run: just check-default

       - name: tests
         timeout-minutes: 60
-        run: |
-          cargo test --lib --tests -p=actix-router --all-features
-          cargo test --lib --tests -p=actix-http --all-features
-          cargo test --lib --tests -p=actix-web --features=rustls-0_20,rustls-0_21,openssl -- --skip=test_reading_deflate_encoding_large_random_rustls
-          cargo test --lib --tests -p=actix-web-codegen --all-features
-          cargo test --lib --tests -p=awc --all-features
-          cargo test --lib --tests -p=actix-http-test --all-features
-          cargo test --lib --tests -p=actix-test --all-features
-          cargo test --lib --tests -p=actix-files
-          cargo test --lib --tests -p=actix-multipart --all-features
-          cargo test --lib --tests -p=actix-web-actors --all-features
+        run: just test

-      - name: Clear the cargo caches
-        run: |
-          cargo install cargo-cache --version 0.8.3 --no-default-features --features ci-autoclean
-          cargo-cache
+      - name: CI cache clean
+        run: cargo-ci-cache-clean

   ci_feature_powerset_check:
     name: Verify Feature Combinations

@@ -81,34 +73,19 @@ jobs:
     steps:
       - uses: actions/checkout@v4

-      - name: Install Rust
-        uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
+      - name: Free Disk Space
+        run: ./scripts/free-disk-space.sh

-      - name: Install cargo-hack
-        uses: taiki-e/install-action@v2.22.0
-        with:
-          tool: cargo-hack
+      - name: Setup mold linker
+        uses: rui314/setup-mold@v1

-      - name: check feature combinations
-        run: cargo ci-check-all-feature-powerset
-
-      - name: check feature combinations
-        run: cargo ci-check-all-feature-powerset-linux
-
-  nextest:
-    name: nextest
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-
       - name: Install Rust
-        uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
+        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0

-      - name: Install nextest
-        uses: taiki-e/install-action@v2.22.0
+      - name: Install just, cargo-hack
+        uses: taiki-e/install-action@v2.42.17
         with:
-          tool: nextest
+          tool: just,cargo-hack

-      - name: Test with cargo-nextest
-        run: cargo nextest run
+      - name: Check feature combinations
+        run: just check-feature-combinations

@@ -16,7 +16,13 @@ concurrency:
   cancel-in-progress: true

 jobs:
+  read_msrv:
+    name: Read MSRV
+    uses: actions-rust-lang/msrv/.github/workflows/msrv.yml@v0.1.0
+
   build_and_test:
+    needs: read_msrv
+
     strategy:
       fail-fast: false
       matrix:

@@ -26,7 +32,7 @@ jobs:
           - { name: macOS, os: macos-latest, triple: x86_64-apple-darwin }
           - { name: Windows, os: windows-latest, triple: x86_64-pc-windows-msvc }
         version:
-          - { name: msrv, version: 1.68.0 }
+          - { name: msrv, version: "${{ needs.read_msrv.outputs.msrv }}" }
           - { name: stable, version: stable }

     name: ${{ matrix.target.name }} / ${{ matrix.version.name }}

@@ -35,56 +41,49 @@ jobs:
     steps:
       - uses: actions/checkout@v4

+      - name: Install nasm
+        if: matrix.target.os == 'windows-latest'
+        uses: ilammy/setup-nasm@v1.5.1
+
       - name: Install OpenSSL
         if: matrix.target.os == 'windows-latest'
-        run: choco install openssl -y --forcex64 --no-progress
-      - name: Set OpenSSL dir in env
-        if: matrix.target.os == 'windows-latest'
+        shell: bash
         run: |
-          echo 'OPENSSL_DIR=C:\Program Files\OpenSSL-Win64' | Out-File -FilePath $env:GITHUB_ENV -Append
-          echo 'OPENSSL_DIR=C:\Program Files\OpenSSL' | Out-File -FilePath $env:GITHUB_ENV -Append
+          set -e
+          choco install openssl --version=1.1.1.2100 -y --no-progress
+          echo 'OPENSSL_DIR=C:\Program Files\OpenSSL' >> $GITHUB_ENV
+          echo "RUSTFLAGS=-C target-feature=+crt-static" >> $GITHUB_ENV

+      - name: Setup mold linker
+        if: matrix.target.os == 'ubuntu-latest'
+        uses: rui314/setup-mold@v1
+
       - name: Install Rust (${{ matrix.version.name }})
-        uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
+        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
         with:
           toolchain: ${{ matrix.version.version }}

-      - name: Install cargo-hack
-        uses: taiki-e/install-action@v2.22.0
+      - name: Install just, cargo-hack, cargo-nextest, cargo-ci-cache-clean
+        uses: taiki-e/install-action@v2.42.17
         with:
-          tool: cargo-hack
+          tool: just,cargo-hack,cargo-nextest,cargo-ci-cache-clean

       - name: workaround MSRV issues
         if: matrix.version.name == 'msrv'
-        run: |
-          cargo update -p=clap --precise=4.3.24
-          cargo update -p=clap_lex --precise=0.5.0
-          cargo update -p=anstyle --precise=1.0.2
+        run: just downgrade-for-msrv

       - name: check minimal
-        run: cargo ci-check-min
+        run: just check-min

       - name: check default
-        run: cargo ci-check-default
+        run: just check-default

       - name: tests
         timeout-minutes: 60
-        run: |
-          cargo test --lib --tests -p=actix-router --all-features
-          cargo test --lib --tests -p=actix-http --all-features
-          cargo test --lib --tests -p=actix-web --features=rustls-0_20,rustls-0_21,openssl -- --skip=test_reading_deflate_encoding_large_random_rustls
-          cargo test --lib --tests -p=actix-web-codegen --all-features
-          cargo test --lib --tests -p=awc --all-features
-          cargo test --lib --tests -p=actix-http-test --all-features
-          cargo test --lib --tests -p=actix-test --all-features
-          cargo test --lib --tests -p=actix-files
-          cargo test --lib --tests -p=actix-multipart --all-features
-          cargo test --lib --tests -p=actix-web-actors --all-features
+        run: just test

-      - name: Clear the cargo caches
-        run: |
-          cargo install cargo-cache --version 0.8.3 --no-default-features --features ci-autoclean
-          cargo-cache
+      - name: CI cache clean
+        run: cargo-ci-cache-clean

   io-uring:
     name: io-uring tests

@@ -93,7 +92,7 @@ jobs:
       - uses: actions/checkout@v4

       - name: Install Rust
-        uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
+        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
         with:
           toolchain: nightly

@@ -109,10 +108,14 @@ jobs:
       - uses: actions/checkout@v4

       - name: Install Rust (nightly)
-        uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
+        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
         with:
           toolchain: nightly

+      - name: Install just
+        uses: taiki-e/install-action@v2.42.17
+        with:
+          tool: just
+
       - name: doc tests
-        run: cargo ci-doctest
+        run: just test-docs
+        timeout-minutes: 60

@@ -17,21 +17,24 @@ jobs:
     steps:
       - uses: actions/checkout@v4

-      - name: Install Rust
-        uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
+      - name: Install Rust (nightly)
+        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
         with:
-          components: llvm-tools-preview
+          toolchain: nightly
+          components: llvm-tools

-      - name: Install cargo-llvm-cov
-        uses: taiki-e/install-action@v2.22.0
+      - name: Install just, cargo-llvm-cov, cargo-nextest
+        uses: taiki-e/install-action@v2.42.17
         with:
-          tool: cargo-llvm-cov
+          tool: just,cargo-llvm-cov,cargo-nextest

       - name: Generate code coverage
-        run: cargo llvm-cov --workspace --all-features --codecov --output-path codecov.json
+        run: just test-coverage-codecov

       - name: Upload coverage to Codecov
-        uses: codecov/codecov-action@v3.1.4
+        uses: codecov/codecov-action@v4.5.0
         with:
           files: codecov.json
           fail_ci_if_error: true
+        env:
+          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

@@ -17,12 +17,13 @@ jobs:
     steps:
       - uses: actions/checkout@v4

-      - uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
+      - name: Install Rust (nightly)
+        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
         with:
           toolchain: nightly
           components: rustfmt

-      - name: Check with rustfmt
+      - name: Check with Rustfmt
         run: cargo fmt --all -- --check

   clippy:

@@ -35,7 +36,7 @@ jobs:
       - uses: actions/checkout@v4

       - name: Install Rust
-        uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
+        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
         with:
           components: clippy

@@ -53,7 +54,8 @@ jobs:
     steps:
       - uses: actions/checkout@v4

-      - uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
+      - name: Install Rust (nightly)
+        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
         with:
           toolchain: nightly
           components: rust-docs

@@ -63,25 +65,52 @@ jobs:
           RUSTDOCFLAGS: -D warnings
         run: cargo +nightly doc --no-deps --workspace --all-features

-  public-api-diff:
+  check-external-types:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
+
+      - name: Install Rust (nightly-2024-05-01)
+        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
+        with:
+          toolchain: nightly-2024-05-01
+
+      - name: Install just
+        uses: taiki-e/install-action@v2.42.17
+        with:
+          tool: just
+
+      - name: Install cargo-check-external-types
+        uses: taiki-e/cache-cargo-install-action@v2.0.1
+        with:
+          tool: cargo-check-external-types
+
+      - name: check external types
+        run: just check-external-types-all +nightly-2024-05-01
+
+  public-api-diff:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout main branch
+        uses: actions/checkout@v4
         with:
           ref: ${{ github.base_ref }}

-      - uses: actions/checkout@v4
+      - name: Checkout PR branch
+        uses: actions/checkout@v4

-      - uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
+      - name: Install Rust (nightly-2024-06-07)
+        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
         with:
-          toolchain: nightly-2023-08-25
+          toolchain: nightly-2024-06-07

-      - uses: taiki-e/cache-cargo-install-action@v1.3.0
+      - name: Install cargo-public-api
+        uses: taiki-e/install-action@v2.42.17
         with:
           tool: cargo-public-api

-      - name: generate API diff
+      - name: Generate API diff
         run: |
           for f in $(find -mindepth 2 -maxdepth 2 -name Cargo.toml); do
-            cargo public-api --manifest-path "$f" diff ${{ github.event.pull_request.base.sha }}..${{ github.sha }}
+            cargo public-api --manifest-path "$f" --simplified diff ${{ github.event.pull_request.base.sha }}..${{ github.sha }}
           done

@ -1,41 +0,0 @@
name: Upload Documentation
on:
push:
branches: [master]
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
build:
permissions:
contents: write
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Install Rust
uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
with:
toolchain: nightly
- name: Build Docs
run: cargo +nightly doc --no-deps --workspace --all-features
env:
RUSTDOCFLAGS: --cfg=docsrs
- name: Tweak HTML
run: echo '<meta http-equiv="refresh" content="0;url=actix_web/index.html">' > target/doc/index.html
- name: Deploy to GitHub Pages
uses: JamesIves/github-pages-deploy-action@v4.5.0
with:
folder: target/doc
single-commit: true

@@ -1,5 +1,5 @@
 overrides:
-  - files: '*.md'
+  - files: "*.md"
     options:
       printWidth: 9999
       proseWrap: never

@@ -15,9 +15,11 @@ members = [
 ]

 [workspace.package]
+homepage = "https://actix.rs"
+repository = "https://github.com/actix/actix-web"
 license = "MIT OR Apache-2.0"
 edition = "2021"
-rust-version = "1.68"
+rust-version = "1.72"

 [profile.dev]
 # Disabling debug info speeds up builds a bunch and we don't rely on it for debugging that much.

@@ -49,3 +51,11 @@ awc = { path = "awc" }
 # actix-utils = { path = "../actix-net/actix-utils" }
 # actix-tls = { path = "../actix-net/actix-tls" }
 # actix-server = { path = "../actix-net/actix-server" }
+
+[workspace.lints.rust]
+rust_2018_idioms = { level = "deny" }
+future_incompatible = { level = "deny" }
+nonstandard_style = { level = "deny" }
+
+[workspace.lints.clippy]
+# clone_on_ref_ptr = { level = "deny" }

@@ -2,6 +2,18 @@

 ## Unreleased

+## 0.6.6
+
+- Update `tokio-uring` dependency to `0.4`.
+- Minimum supported Rust version (MSRV) is now 1.72.
+
+## 0.6.5
+
+- Fix handling of special characters in filenames.
+
+## 0.6.4
+
+- Fix handling of newlines in filenames.
 - Minimum supported Rust version (MSRV) is now 1.68 due to transitive `time` dependency.

 ## 0.6.3

@@ -1,6 +1,6 @@
 [package]
 name = "actix-files"
-version = "0.6.3"
+version = "0.6.6"
 authors = [
   "Nikolay Kim <fafhrd91@gmail.com>",
   "Rob Ede <robjtede@icloud.com>",

@@ -13,9 +13,14 @@ categories = ["asynchronous", "web-programming::http-server"]
 license = "MIT OR Apache-2.0"
 edition = "2021"

-[lib]
-name = "actix_files"
-path = "src/lib.rs"
+[package.metadata.cargo_check_external_types]
+allowed_external_types = [
+  "actix_http::*",
+  "actix_service::*",
+  "actix_web::*",
+  "http::*",
+  "mime::*",
+]

 [features]
 experimental-io-uring = ["actix-web/experimental-io-uring", "tokio-uring"]

@@ -40,12 +45,15 @@ v_htmlescape = "0.15.5"

 # experimental-io-uring
 [target.'cfg(target_os = "linux")'.dependencies]
-tokio-uring = { version = "0.4", optional = true, features = ["bytes"] }
-actix-server = { version = "2.2", optional = true } # ensure matching tokio-uring versions
+tokio-uring = { version = "0.5", optional = true, features = ["bytes"] }
+actix-server = { version = "2.4", optional = true } # ensure matching tokio-uring versions

 [dev-dependencies]
 actix-rt = "2.7"
 actix-test = "0.1"
 actix-web = "4"
-env_logger = "0.10"
+env_logger = "0.11"
 tempfile = "3.2"
+
+[lints]
+workspace = true

@@ -1,18 +1,32 @@
-# actix-files
+# `actix-files`

-> Static file serving for Actix Web
-
+<!-- prettier-ignore-start -->
 [![crates.io](https://img.shields.io/crates/v/actix-files?label=latest)](https://crates.io/crates/actix-files)
-[![Documentation](https://docs.rs/actix-files/badge.svg?version=0.6.3)](https://docs.rs/actix-files/0.6.3)
-![Version](https://img.shields.io/badge/rustc-1.68+-ab6000.svg)
+[![Documentation](https://docs.rs/actix-files/badge.svg?version=0.6.6)](https://docs.rs/actix-files/0.6.6)
+![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
 ![License](https://img.shields.io/crates/l/actix-files.svg)
 <br />
-[![dependency status](https://deps.rs/crate/actix-files/0.6.3/status.svg)](https://deps.rs/crate/actix-files/0.6.3)
+[![dependency status](https://deps.rs/crate/actix-files/0.6.6/status.svg)](https://deps.rs/crate/actix-files/0.6.6)
 [![Download](https://img.shields.io/crates/d/actix-files.svg)](https://crates.io/crates/actix-files)
 [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
+<!-- prettier-ignore-end -->

-## Documentation & Resources
+<!-- cargo-rdme start -->

-- [API Documentation](https://docs.rs/actix-files)
-- [Example Project](https://github.com/actix/examples/tree/master/basics/static-files)
-- Minimum Supported Rust Version (MSRV): 1.68
+Static file serving for Actix Web.
+
+Provides a non-blocking service for serving static files from disk.
+
+## Examples
+
+```rust
+use actix_web::App;
+use actix_files::Files;
+
+let app = App::new()
+    .service(Files::new("/static", ".").prefer_utf8(true));
+```
+
+<!-- cargo-rdme end -->

@@ -11,8 +11,7 @@
 //!     .service(Files::new("/static", ".").prefer_utf8(true));
 //! ```

-#![deny(rust_2018_idioms, nonstandard_style)]
-#![warn(future_incompatible, missing_docs, missing_debug_implementations)]
+#![warn(missing_docs, missing_debug_implementations)]
 #![doc(html_logo_url = "https://actix.rs/img/logo.png")]
 #![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
 #![cfg_attr(docsrs, feature(doc_auto_cfg))]

@@ -75,7 +74,7 @@ mod tests {
         dev::ServiceFactory,
         guard,
         http::{
-            header::{self, ContentDisposition, DispositionParam, DispositionType},
+            header::{self, ContentDisposition, DispositionParam},
             Method, StatusCode,
         },
         middleware::Compress,

@@ -307,11 +306,11 @@ mod tests {
         let resp = file.respond_to(&req);
         assert_eq!(
             resp.headers().get(header::CONTENT_TYPE).unwrap(),
-            "application/javascript; charset=utf-8"
+            "text/javascript",
         );
         assert_eq!(
             resp.headers().get(header::CONTENT_DISPOSITION).unwrap(),
-            "inline; filename=\"test.js\""
+            "inline; filename=\"test.js\"",
         );
     }

@@ -568,6 +567,30 @@ mod tests {
         assert_eq!(bytes, data);
     }

+    #[cfg(not(target_os = "windows"))]
+    #[actix_rt::test]
+    async fn test_static_files_with_special_characters() {
+        // Create the file we want to test against ad-hoc. We can't check it in as otherwise
+        // Windows can't even checkout this repository.
+        let temp_dir = tempfile::tempdir().unwrap();
+        let file_with_newlines = temp_dir.path().join("test\n\x0B\x0C\rnewline.text");
+        fs::write(&file_with_newlines, "Look at my newlines").unwrap();
+
+        let srv = test::init_service(
+            App::new().service(Files::new("/", temp_dir.path()).index_file("Cargo.toml")),
+        )
+        .await;
+
+        let request = TestRequest::get()
+            .uri("/test%0A%0B%0C%0Dnewline.text")
+            .to_request();
+        let response = test::call_service(&srv, request).await;
+        assert_eq!(response.status(), StatusCode::OK);
+
+        let bytes = test::read_body(response).await;
+        let data = web::Bytes::from(fs::read(file_with_newlines).unwrap());
+        assert_eq!(bytes, data);
+    }
+
     #[actix_rt::test]
     async fn test_files_not_allowed() {
         let srv = test::init_service(App::new().service(Files::new("/", "."))).await;

@@ -840,9 +863,9 @@ mod tests {

     #[actix_rt::test]
     async fn test_percent_encoding_2() {
-        let tmpdir = tempfile::tempdir().unwrap();
+        let temp_dir = tempfile::tempdir().unwrap();
         let filename = match cfg!(unix) {
-            true => "ض:?#[]{}<>()@!$&'`|*+,;= %20.test",
+            true => "ض:?#[]{}<>()@!$&'`|*+,;= %20\n.test",
            false => "ض#[]{}()@!$&'`+,;= %20.test",
         };
         let filename_encoded = filename

@@ -852,9 +875,9 @@ mod tests {
                 write!(&mut buf, "%{:02X}", c).unwrap();
                 buf
             });
-        std::fs::File::create(tmpdir.path().join(filename)).unwrap();
+        std::fs::File::create(temp_dir.path().join(filename)).unwrap();

-        let srv = test::init_service(App::new().service(Files::new("", tmpdir.path()))).await;
+        let srv = test::init_service(App::new().service(Files::new("/", temp_dir.path()))).await;

         let req = TestRequest::get()
             .uri(&format!("/{}", filename_encoded))

@@ -24,7 +24,6 @@ use bitflags::bitflags;
 use derive_more::{Deref, DerefMut};
 use futures_core::future::LocalBoxFuture;
 use mime::Mime;
-use mime_guess::from_path;

 use crate::{encoding::equiv_utf8_text, range::HttpRange};

@@ -128,7 +127,7 @@ impl NamedFile {
             }
         };

-        let ct = from_path(&path).first_or_octet_stream();
+        let ct = mime_guess::from_path(&path).first_or_octet_stream();

         let disposition = match ct.type_() {
             mime::IMAGE | mime::TEXT | mime::AUDIO | mime::VIDEO => DispositionType::Inline,

@@ -140,7 +139,13 @@ impl NamedFile {
             _ => DispositionType::Attachment,
         };

-        let mut parameters = vec![DispositionParam::Filename(String::from(filename.as_ref()))];
+        // replace special characters in filenames which could occur on some filesystems
+        let filename_s = filename
+            .replace('\n', "%0A") // \n line break
+            .replace('\x0B', "%0B") // \v vertical tab
+            .replace('\x0C', "%0C") // \f form feed
+            .replace('\r', "%0D"); // \r carriage return
+        let mut parameters = vec![DispositionParam::Filename(filename_s)];

         if !filename.is_ascii() {
             parameters.push(DispositionParam::FilenameExt(ExtendedValue {
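
The replacement chain above percent-encodes the control characters that would otherwise break the `Content-Disposition` header. A standalone sketch of the same mapping (`escape_filename` is a hypothetical helper used only for illustration, not part of the actix-files API):

```rust
// Standalone sketch of the escaping applied in NamedFile above.
fn escape_filename(name: &str) -> String {
    name.replace('\n', "%0A") // line feed
        .replace('\x0B', "%0B") // vertical tab
        .replace('\x0C', "%0C") // form feed
        .replace('\r', "%0D") // carriage return
}

fn main() {
    assert_eq!(escape_filename("test\nnewline.text"), "test%0Anewline.text");
}
```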

@@ -2,6 +2,10 @@

 ## Unreleased

+- Minimum supported Rust version (MSRV) is now 1.72.
+
+## 3.2.0
+
 - Minimum supported Rust version (MSRV) is now 1.68 due to transitive `time` dependency.

 ## 3.1.0

@@ -1,6 +1,6 @@
 [package]
 name = "actix-http-test"
-version = "3.1.0"
+version = "3.2.0"
 authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
 description = "Various helpers for Actix applications to use during testing"
 keywords = ["http", "web", "framework", "async", "futures"]

@@ -18,9 +18,17 @@ edition = "2021"
 [package.metadata.docs.rs]
 features = []

-[lib]
-name = "actix_http_test"
-path = "src/lib.rs"
+[package.metadata.cargo_check_external_types]
+allowed_external_types = [
+  "actix_codec::*",
+  "actix_http::*",
+  "actix_server::*",
+  "awc::*",
+  "bytes::*",
+  "futures_core::*",
+  "http::*",
+  "tokio::*",
+]

 [features]
 default = []

@@ -51,3 +59,6 @@ tokio = { version = "1.24.2", features = ["sync"] }

 [dev-dependencies]
 actix-http = "3"
+
+[lints]
+workspace = true

@@ -1,17 +1,20 @@
-# actix-http-test
+# `actix-http-test`

-> Various helpers for Actix applications to use during testing.
-
+<!-- prettier-ignore-start -->
 [![crates.io](https://img.shields.io/crates/v/actix-http-test?label=latest)](https://crates.io/crates/actix-http-test)
-[![Documentation](https://docs.rs/actix-http-test/badge.svg?version=3.1.0)](https://docs.rs/actix-http-test/3.1.0)
-![Version](https://img.shields.io/badge/rustc-1.68+-ab6000.svg)
+[![Documentation](https://docs.rs/actix-http-test/badge.svg?version=3.2.0)](https://docs.rs/actix-http-test/3.2.0)
+![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
 ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http-test)
 <br>
-[![Dependency Status](https://deps.rs/crate/actix-http-test/3.1.0/status.svg)](https://deps.rs/crate/actix-http-test/3.1.0)
+[![Dependency Status](https://deps.rs/crate/actix-http-test/3.2.0/status.svg)](https://deps.rs/crate/actix-http-test/3.2.0)
 [![Download](https://img.shields.io/crates/d/actix-http-test.svg)](https://crates.io/crates/actix-http-test)
 [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
+<!-- prettier-ignore-end -->

-## Documentation & Resources
+<!-- cargo-rdme start -->

-- [API Documentation](https://docs.rs/actix-http-test)
-- Minimum Supported Rust Version (MSRV): 1.68
+Various helpers for Actix applications to use during testing.
+
+<!-- cargo-rdme end -->

@@ -1,7 +1,5 @@
 //! Various helpers for Actix applications to use during testing.

-#![deny(rust_2018_idioms, nonstandard_style)]
-#![warn(future_incompatible)]
 #![doc(html_logo_url = "https://actix.rs/img/logo.png")]
 #![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
 #![cfg_attr(docsrs, feature(doc_auto_cfg))]

@@ -2,14 +2,57 @@

 ## Unreleased

+## 3.9.0
+
+### Added
+
+- Implement `FromIterator<(HeaderName, HeaderValue)>` for `HeaderMap`.
+
+## 3.8.0
+
+### Added
+
+- Add `error::InvalidStatusCode` re-export.
+
+## 3.7.0
+
+### Added
+
+- Add `rustls-0_23` crate feature
+- Add `{h1::H1Service, h2::H2Service, HttpService}::rustls_0_23()` and `HttpService::rustls_0_23_with_config()` service constructors.
+
 ### Changed

-- Updated `zstd` dependency to `0.13`.
-- Implemented `From<HeaderMap>` for `http::HeaderMap`.
+- Update `brotli` dependency to `6`.
+- Minimum supported Rust version (MSRV) is now 1.72.
+
+## 3.6.0
+
+### Added
+
+- Add `rustls-0_22` crate feature.
+- Add `{h1::H1Service, h2::H2Service, HttpService}::rustls_0_22()` and `HttpService::rustls_0_22_with_config()` service constructors.
+- Implement `From<&HeaderMap>` for `http::HeaderMap`.
+
+## 3.5.1

 ### Fixed

-- Do not encode zero-sized response bodies
+- Prevent hang when returning zero-sized response bodies through compression layer.
+
+## 3.5.0
+
+### Added
+
+- Implement `From<HeaderMap>` for `http::HeaderMap`.
+
+### Changed
+
+- Updated `zstd` dependency to `0.13`.
+
+### Fixed
+
+- Prevent compression of zero-sized response bodies.

 ## 3.4.0
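
The 3.9.0 entry above adds a `FromIterator` impl for `HeaderMap`; a minimal usage sketch based on the doc example later in this diff (the impl appends, so duplicate names are preserved):

```rust
use actix_http::header::{self, HeaderMap, HeaderValue};

fn main() {
    // FromIterator appends, so repeated header names each keep their value.
    let headers = HeaderMap::from_iter([
        (header::CONTENT_TYPE, HeaderValue::from_static("text/plain")),
        (header::COOKIE, HeaderValue::from_static("foo=1")),
        (header::COOKIE, HeaderValue::from_static("bar=1")),
    ]);

    assert_eq!(headers.len(), 3);
}
```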

@@ -1,6 +1,6 @@
 [package]
 name = "actix-http"
-version = "3.4.0"
+version = "3.9.0"
 authors = [
   "Nikolay Kim <fafhrd91@gmail.com>",
   "Rob Ede <robjtede@icloud.com>",

@@ -20,48 +20,86 @@ edition.workspace = true
 rust-version.workspace = true

 [package.metadata.docs.rs]
-# features that docs.rs will build with
-features = ["http2", "ws", "openssl", "rustls-0_20", "rustls-0_21", "compress-brotli", "compress-gzip", "compress-zstd"]
+rustdoc-args = ["--cfg", "docsrs"]
+features = [
+  "http2",
+  "ws",
+  "openssl",
+  "rustls-0_20",
+  "rustls-0_21",
+  "rustls-0_22",
+  "rustls-0_23",
+  "compress-brotli",
+  "compress-gzip",
+  "compress-zstd",
+]

-[lib]
-name = "actix_http"
-path = "src/lib.rs"
+[package.metadata.cargo_check_external_types]
+allowed_external_types = [
+  "actix_codec::*",
+  "actix_service::*",
+  "actix_tls::*",
+  "actix_utils::*",
+  "bytes::*",
+  "bytestring::*",
+  "encoding_rs::*",
+  "futures_core::*",
+  "h2::*",
+  "http::*",
+  "httparse::*",
+  "language_tags::*",
+  "mime::*",
+  "openssl::*",
+  "rustls::*",
+  "tokio_util::*",
+  "tokio::*",
+]

 [features]
 default = []

 # HTTP/2 protocol support
-http2 = ["h2"]
+http2 = ["dep:h2"]

 # WebSocket protocol implementation
 ws = [
-  "local-channel",
-  "base64",
-  "rand",
-  "sha1",
+  "dep:local-channel",
+  "dep:base64",
+  "dep:rand",
+  "dep:sha1",
 ]

 # TLS via OpenSSL
-openssl = ["actix-tls/accept", "actix-tls/openssl"]
+openssl = ["__tls", "actix-tls/accept", "actix-tls/openssl"]

 # TLS via Rustls v0.20
-rustls = ["rustls-0_20"]
+rustls = ["__tls", "rustls-0_20"]

 # TLS via Rustls v0.20
-rustls-0_20 = ["actix-tls/accept", "actix-tls/rustls-0_20"]
+rustls-0_20 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_20"]

 # TLS via Rustls v0.21
-rustls-0_21 = ["actix-tls/accept", "actix-tls/rustls-0_21"]
+rustls-0_21 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_21"]
+
+# TLS via Rustls v0.22
+rustls-0_22 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_22"]
+
+# TLS via Rustls v0.23
+rustls-0_23 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_23"]

 # Compression codecs
-compress-brotli = ["__compress", "brotli"]
-compress-gzip = ["__compress", "flate2"]
-compress-zstd = ["__compress", "zstd"]
+compress-brotli = ["__compress", "dep:brotli"]
+compress-gzip = ["__compress", "dep:flate2"]
+compress-zstd = ["__compress", "dep:zstd"]

 # Internal (PRIVATE!) features used to aid testing and checking feature status.
 # Don't rely on these whatsoever. They are semver-exempt and may disappear at anytime.
 __compress = []

+# Internal (PRIVATE!) features used to aid checking feature status.
+# Don't rely on these whatsoever. They may disappear at anytime.
+__tls = []
+
 [dependencies]
 actix-service = "2"
 actix-codec = "0.5"

@@ -89,54 +127,62 @@ tokio-util = { version = "0.7", features = ["io", "codec"] }
 tracing = { version = "0.1.30", default-features = false, features = ["log"] }

 # http2
-h2 = { version = "0.3.17", optional = true }
+h2 = { version = "0.3.26", optional = true }

 # websockets
 local-channel = { version = "0.1", optional = true }
-base64 = { version = "0.21", optional = true }
+base64 = { version = "0.22", optional = true }
 rand = { version = "0.8", optional = true }
 sha1 = { version = "0.10", optional = true }

 # openssl/rustls
-actix-tls = { version = "3.1", default-features = false, optional = true }
+actix-tls = { version = "3.4", default-features = false, optional = true }

 # compress-*
-brotli = { version = "3.3.3", optional = true }
+brotli = { version = "6", optional = true }
 flate2 = { version = "1.0.13", optional = true }
 zstd = { version = "0.13", optional = true }

 [dev-dependencies]
 actix-http-test = { version = "3", features = ["openssl"] }
 actix-server = "2"
-actix-tls = { version = "3.1", features = ["openssl"] }
+actix-tls = { version = "3.4", features = ["openssl", "rustls-0_23-webpki-roots"] }
 actix-web = "4"

 async-stream = "0.3"
 criterion = { version = "0.5", features = ["html_reports"] }
-env_logger = "0.10"
+divan = "0.1.8"
+env_logger = "0.11"
 futures-util = { version = "0.3.17", default-features = false, features = ["alloc"] }
 memchr = "2.4"
 once_cell = "1.9"
-rcgen = "0.11"
+rcgen = "0.13"
 regex = "1.3"
 rustversion = "1"
-rustls-pemfile = "1"
+rustls-pemfile = "2"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 static_assertions = "1"
 tls-openssl = { package = "openssl", version = "0.10.55" }
-tls-rustls_021 = { package = "rustls", version = "0.21" }
+tls-rustls_023 = { package = "rustls", version = "0.23" }
 tokio = { version = "1.24.2", features = ["net", "rt", "macros"] }

+[lints]
+workspace = true
+
 [[example]]
 name = "ws"
-required-features = ["ws", "rustls-0_21"]
+required-features = ["ws", "rustls-0_23"]

 [[example]]
 name = "tls_rustls"
-required-features = ["http2", "rustls-0_21"]
+required-features = ["http2", "rustls-0_23"]

 [[bench]]
 name = "response-body-compression"
 harness = false
 required-features = ["compress-brotli", "compress-gzip", "compress-zstd"]
+
+[[bench]]
+name = "date-formatting"
+harness = false

@@ -5,21 +5,16 @@
 <!-- prettier-ignore-start -->

 [![crates.io](https://img.shields.io/crates/v/actix-http?label=latest)](https://crates.io/crates/actix-http)
-[![Documentation](https://docs.rs/actix-http/badge.svg?version=3.4.0)](https://docs.rs/actix-http/3.4.0)
-![Version](https://img.shields.io/badge/rustc-1.68+-ab6000.svg)
+[![Documentation](https://docs.rs/actix-http/badge.svg?version=3.9.0)](https://docs.rs/actix-http/3.9.0)
+![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
 ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http.svg)
 <br />
-[![dependency status](https://deps.rs/crate/actix-http/3.4.0/status.svg)](https://deps.rs/crate/actix-http/3.4.0)
+[![dependency status](https://deps.rs/crate/actix-http/3.9.0/status.svg)](https://deps.rs/crate/actix-http/3.9.0)
 [![Download](https://img.shields.io/crates/d/actix-http.svg)](https://crates.io/crates/actix-http)
 [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)

 <!-- prettier-ignore-end -->

-## Documentation & Resources
-
-- [API Documentation](https://docs.rs/actix-http)
-- Minimum Supported Rust Version (MSRV): 1.68
-
 ## Examples

 ```rust

@ -0,0 +1,20 @@
use std::time::SystemTime;
use actix_http::header::HttpDate;
use divan::{black_box, AllocProfiler, Bencher};
#[global_allocator]
static ALLOC: AllocProfiler = AllocProfiler::system();
#[divan::bench]
fn date_formatting(b: Bencher<'_, '_>) {
let now = SystemTime::now();
b.bench(|| {
black_box(HttpDate::from(black_box(now)).to_string());
})
}
fn main() {
divan::main();
}

@@ -8,7 +8,7 @@

 use std::{convert::Infallible, io};

-use actix_http::{HttpService, Request, Response, StatusCode};
+use actix_http::{body::BodyStream, HttpService, Request, Response, StatusCode};
 use actix_server::Server;

 #[tokio::main(flavor = "current_thread")]

@@ -19,7 +19,12 @@ async fn main() -> io::Result<()> {
         .bind("h2c-detect", ("127.0.0.1", 8080), || {
             HttpService::build()
                 .finish(|_req: Request| async move {
-                    Ok::<_, Infallible>(Response::build(StatusCode::OK).body("Hello!"))
+                    Ok::<_, Infallible>(Response::build(StatusCode::OK).body(BodyStream::new(
+                        futures_util::stream::iter([
+                            Ok::<_, String>("123".into()),
+                            Err("wertyuikmnbvcxdfty6t".to_owned()),
+                        ]),
+                    )))
                 })
                 .tcp_auto_h2c()
         })?

@@ -12,7 +12,7 @@
 //! Protocol: HTTP/1.1
 //! ```

-extern crate tls_rustls_021 as rustls;
+extern crate tls_rustls_023 as rustls;

 use std::io;

@@ -36,31 +36,34 @@ async fn main() -> io::Result<()> {
                 );
                 ok::<_, Error>(Response::ok().set_body(body))
             })
-            .rustls_021(rustls_config())
+            .rustls_0_23(rustls_config())
     })?
     .run()
     .await
 }

 fn rustls_config() -> rustls::ServerConfig {
-    let cert = rcgen::generate_simple_self_signed(vec!["localhost".to_owned()]).unwrap();
-    let cert_file = cert.serialize_pem().unwrap();
-    let key_file = cert.serialize_private_key_pem();
+    let rcgen::CertifiedKey { cert, key_pair } =
+        rcgen::generate_simple_self_signed(["localhost".to_owned()]).unwrap();
+    let cert_file = cert.pem();
+    let key_file = key_pair.serialize_pem();

     let cert_file = &mut io::BufReader::new(cert_file.as_bytes());
     let key_file = &mut io::BufReader::new(key_file.as_bytes());

     let cert_chain = rustls_pemfile::certs(cert_file)
-        .unwrap()
-        .into_iter()
-        .map(rustls::Certificate)
-        .collect();
-    let mut keys = rustls_pemfile::pkcs8_private_keys(key_file).unwrap();
+        .collect::<Result<Vec<_>, _>>()
+        .unwrap();
+    let mut keys = rustls_pemfile::pkcs8_private_keys(key_file)
+        .collect::<Result<Vec<_>, _>>()
+        .unwrap();

     let mut config = rustls::ServerConfig::builder()
-        .with_safe_defaults()
         .with_no_client_auth()
-        .with_single_cert(cert_chain, rustls::PrivateKey(keys.remove(0)))
+        .with_single_cert(
+            cert_chain,
+            rustls::pki_types::PrivateKeyDer::Pkcs8(keys.remove(0)),
+        )
         .unwrap();

     const H1_ALPN: &[u8] = b"http/1.1";
@@ -1,7 +1,7 @@
 //! Sets up a WebSocket server over TCP and TLS.
 //! Sends a heartbeat message every 4 seconds but does not respond to any incoming frames.

-extern crate tls_rustls_021 as rustls;
+extern crate tls_rustls_023 as rustls;

 use std::{
     io,

@@ -30,7 +30,7 @@ async fn main() -> io::Result<()> {
         .bind("tls", ("127.0.0.1", 8443), || {
             HttpService::build()
                 .finish(handler)
-                .rustls_021(tls_config())
+                .rustls_0_23(tls_config())
         })?
         .run()
         .await

@@ -85,27 +85,27 @@ impl Stream for Heartbeat {
 fn tls_config() -> rustls::ServerConfig {
     use std::io::BufReader;

-    use rustls::{Certificate, PrivateKey};
     use rustls_pemfile::{certs, pkcs8_private_keys};

-    let cert = rcgen::generate_simple_self_signed(vec!["localhost".to_owned()]).unwrap();
-    let cert_file = cert.serialize_pem().unwrap();
-    let key_file = cert.serialize_private_key_pem();
+    let rcgen::CertifiedKey { cert, key_pair } =
+        rcgen::generate_simple_self_signed(["localhost".to_owned()]).unwrap();
+    let cert_file = cert.pem();
+    let key_file = key_pair.serialize_pem();

     let cert_file = &mut BufReader::new(cert_file.as_bytes());
     let key_file = &mut BufReader::new(key_file.as_bytes());

-    let cert_chain = certs(cert_file)
-        .unwrap()
-        .into_iter()
-        .map(Certificate)
-        .collect();
-    let mut keys = pkcs8_private_keys(key_file).unwrap();
+    let cert_chain = certs(cert_file).collect::<Result<Vec<_>, _>>().unwrap();
+    let mut keys = pkcs8_private_keys(key_file)
+        .collect::<Result<Vec<_>, _>>()
+        .unwrap();

     let mut config = rustls::ServerConfig::builder()
-        .with_safe_defaults()
         .with_no_client_auth()
-        .with_single_cert(cert_chain, PrivateKey(keys.remove(0)))
+        .with_single_cert(
+            cert_chain,
+            rustls::pki_types::PrivateKeyDer::Pkcs8(keys.remove(0)),
+        )
         .unwrap();

     config.alpn_protocols.push(b"http/1.1".to_vec());

@@ -531,7 +531,6 @@ where
 mod tests {
     use actix_rt::pin;
     use actix_utils::future::poll_fn;
-    use bytes::{Bytes, BytesMut};
     use futures_util::stream;

     use super::*;

@@ -28,7 +28,7 @@ impl Date {

     fn update(&mut self) {
         self.pos = 0;
-        write!(self, "{}", httpdate::fmt_http_date(SystemTime::now())).unwrap();
+        write!(self, "{}", httpdate::HttpDate::from(SystemTime::now())).unwrap();
     }
 }

@@ -50,10 +50,21 @@ impl<B: MessageBody> Encoder<B> {
         }
     }

+    fn empty() -> Self {
+        Encoder {
+            body: EncoderBody::Full { body: Bytes::new() },
+            encoder: None,
+            fut: None,
+            eof: true,
+        }
+    }
+
     pub fn response(encoding: ContentEncoding, head: &mut ResponseHead, body: B) -> Self {
-        // no need to compress an empty body
-        if matches!(body.size(), BodySize::None | BodySize::Sized(0)) {
-            return Self::none();
+        // no need to compress empty bodies
+        match body.size() {
+            BodySize::None => return Self::none(),
+            BodySize::Sized(0) => return Self::empty(),
+            _ => {}
         }

         let should_encode = !(head.headers().contains_key(&CONTENT_ENCODING)

@@ -3,7 +3,7 @@
 use std::{error::Error as StdError, fmt, io, str::Utf8Error, string::FromUtf8Error};

 use derive_more::{Display, Error, From};
-pub use http::Error as HttpError;
+pub use http::{status::InvalidStatusCode, Error as HttpError};
 use http::{uri::InvalidUri, StatusCode};

 use crate::{body::BoxBody, Response};

@@ -399,9 +399,7 @@ pub enum ContentTypeError {
 #[cfg(test)]
 mod tests {
-    use std::io;
-
-    use http::{Error as HttpError, StatusCode};
+    use http::Error as HttpError;

     use super::*;
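
The new `error::InvalidStatusCode` re-export names the error type that the underlying `http` crate returns from fallible status-code constructors. A hedged sketch of how a caller might use it without depending on `http` directly:

```rust
use actix_http::{error::InvalidStatusCode, StatusCode};

// StatusCode::from_u16 comes from the underlying `http` crate; the re-export lets
// callers name its error type via actix-http alone.
fn parse_status(code: u16) -> Result<StatusCode, InvalidStatusCode> {
    StatusCode::from_u16(code)
}

fn main() {
    assert!(parse_status(200).is_ok());
    assert!(parse_status(99).is_err()); // outside the valid 100..=999 range
}
```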

@@ -198,9 +198,6 @@ impl Encoder<Message<(Response<()>, BodySize)>> for Codec {
 #[cfg(test)]
 mod tests {
-    use bytes::BytesMut;
-    use http::Method;
-
     use super::*;
     use crate::HttpMessage as _;

@@ -563,15 +563,8 @@ impl Decoder for PayloadDecoder {
 #[cfg(test)]
 mod tests {
-    use bytes::{Bytes, BytesMut};
-    use http::{Method, Version};
-
     use super::*;
-    use crate::{
-        error::ParseError,
-        header::{HeaderName, SET_COOKIE},
-        HttpMessage as _,
-    };
+    use crate::{header::SET_COOKIE, HttpMessage as _};

     impl PayloadType {
         pub(crate) fn unwrap(self) -> PayloadDecoder {

@@ -512,8 +512,10 @@ where
                     }

                     Poll::Ready(Some(Err(err))) => {
+                        let err = err.into();
+                        tracing::error!("Response payload stream error: {err:?}");
                         this.flags.insert(Flags::FINISHED);
-                        return Err(DispatchError::Body(err.into()));
+                        return Err(DispatchError::Body(err));
                     }

                     Poll::Pending => return Ok(PollResponse::DoNothing),

@@ -549,6 +551,7 @@ where
                     }

                     Poll::Ready(Some(Err(err))) => {
+                        tracing::error!("Response payload stream error: {err:?}");
                         this.flags.insert(Flags::FINISHED);
                         return Err(DispatchError::Body(
                             Error::new_body().with_cause(err).into(),

@@ -703,7 +706,7 @@ where
                     req.head_mut().peer_addr = *this.peer_addr;

-                    req.conn_data = this.conn_data.as_ref().map(Rc::clone);
+                    req.conn_data.clone_from(this.conn_data);

                     match this.codec.message_type() {
                         // request has no payload

@@ -153,7 +153,7 @@ mod openssl {
 }

 #[cfg(feature = "rustls-0_20")]
-mod rustls_020 {
+mod rustls_0_20 {
     use std::io;

     use actix_service::ServiceFactoryExt as _;

@@ -214,7 +214,7 @@ mod rustls_020 {
 }

 #[cfg(feature = "rustls-0_21")]
-mod rustls_021 {
+mod rustls_0_21 {
     use std::io;

     use actix_service::ServiceFactoryExt as _;

@@ -274,6 +274,128 @@ mod rustls_021 {
     }
 }
#[cfg(feature = "rustls-0_22")]
mod rustls_0_22 {
use std::io;
use actix_service::ServiceFactoryExt as _;
use actix_tls::accept::{
rustls_0_22::{reexports::ServerConfig, Acceptor, TlsStream},
TlsError,
};
use super::*;
impl<S, B, X, U> H1Service<TlsStream<TcpStream>, S, B, X, U>
where
S: ServiceFactory<Request, Config = ()>,
S::Future: 'static,
S::Error: Into<Response<BoxBody>>,
S::InitError: fmt::Debug,
S::Response: Into<Response<B>>,
B: MessageBody,
X: ServiceFactory<Request, Config = (), Response = Request>,
X::Future: 'static,
X::Error: Into<Response<BoxBody>>,
X::InitError: fmt::Debug,
U: ServiceFactory<
(Request, Framed<TlsStream<TcpStream>, Codec>),
Config = (),
Response = (),
>,
U::Future: 'static,
U::Error: fmt::Display + Into<Response<BoxBody>>,
U::InitError: fmt::Debug,
{
/// Create Rustls v0.22 based service.
pub fn rustls_0_22(
self,
config: ServerConfig,
) -> impl ServiceFactory<
TcpStream,
Config = (),
Response = (),
Error = TlsError<io::Error, DispatchError>,
InitError = (),
> {
Acceptor::new(config)
.map_init_err(|_| {
unreachable!("TLS acceptor service factory does not error on init")
})
.map_err(TlsError::into_service_error)
.map(|io: TlsStream<TcpStream>| {
let peer_addr = io.get_ref().0.peer_addr().ok();
(io, peer_addr)
})
.and_then(self.map_err(TlsError::Service))
}
}
}
#[cfg(feature = "rustls-0_23")]
mod rustls_0_23 {
use std::io;
use actix_service::ServiceFactoryExt as _;
use actix_tls::accept::{
rustls_0_23::{reexports::ServerConfig, Acceptor, TlsStream},
TlsError,
};
use super::*;
impl<S, B, X, U> H1Service<TlsStream<TcpStream>, S, B, X, U>
where
S: ServiceFactory<Request, Config = ()>,
S::Future: 'static,
S::Error: Into<Response<BoxBody>>,
S::InitError: fmt::Debug,
S::Response: Into<Response<B>>,
B: MessageBody,
X: ServiceFactory<Request, Config = (), Response = Request>,
X::Future: 'static,
X::Error: Into<Response<BoxBody>>,
X::InitError: fmt::Debug,
U: ServiceFactory<
(Request, Framed<TlsStream<TcpStream>, Codec>),
Config = (),
Response = (),
>,
U::Future: 'static,
U::Error: fmt::Display + Into<Response<BoxBody>>,
U::InitError: fmt::Debug,
{
/// Create Rustls v0.23 based service.
pub fn rustls_0_23(
self,
config: ServerConfig,
) -> impl ServiceFactory<
TcpStream,
Config = (),
Response = (),
Error = TlsError<io::Error, DispatchError>,
InitError = (),
> {
Acceptor::new(config)
.map_init_err(|_| {
unreachable!("TLS acceptor service factory does not error on init")
})
.map_err(TlsError::into_service_error)
.map(|io: TlsStream<TcpStream>| {
let peer_addr = io.get_ref().0.peer_addr().ok();
(io, peer_addr)
})
.and_then(self.map_err(TlsError::Service))
}
}
}
 impl<T, S, B, X, U> H1Service<T, S, B, X, U>
 where
     S: ServiceFactory<Request, Config = ()>,

@@ -419,6 +541,6 @@ where
     fn call(&self, (io, addr): (T, Option<net::SocketAddr>)) -> Self::Future {
         let conn_data = OnConnectData::from_io(&io, self.on_connect_ext.as_deref());

-        Dispatcher::new(io, self.flow.clone(), self.cfg.clone(), addr, conn_data)
+        Dispatcher::new(io, Rc::clone(&self.flow), self.cfg.clone(), addr, conn_data)
     }
 }

@@ -4,7 +4,7 @@ use std::{
     future::Future,
     marker::PhantomData,
     net,
-    pin::Pin,
+    pin::{pin, Pin},
     rc::Rc,
     task::{Context, Poll},
 };

@@ -20,7 +20,6 @@ use h2::{
     Ping, PingPong,
 };
 use pin_project_lite::pin_project;
-use tracing::{error, trace, warn};

 use crate::{
     body::{BodySize, BoxBody, MessageBody},

@@ -127,7 +126,7 @@ where
                 head.headers = parts.headers.into();
                 head.peer_addr = this.peer_addr;

-                req.conn_data = this.conn_data.as_ref().map(Rc::clone);
+                req.conn_data.clone_from(&this.conn_data);

                 let fut = this.flow.service.call(req);
                 let config = this.config.clone();

@@ -147,11 +146,13 @@ where
                     if let Err(err) = res {
                         match err {
                             DispatchError::SendResponse(err) => {
-                                trace!("Error sending HTTP/2 response: {:?}", err)
+                                tracing::trace!("Error sending response: {err:?}");
+                            }
+
+                            DispatchError::SendData(err) => {
+                                tracing::warn!("Send data error: {err:?}");
                             }
-                            DispatchError::SendData(err) => warn!("{:?}", err),
+
                             DispatchError::ResponseBody(err) => {
-                                error!("Response payload stream error: {:?}", err)
+                                tracing::error!("Response payload stream error: {err:?}");
                             }
                         }
                     }

@@ -228,9 +229,9 @@ where
             return Ok(());
         }

-        // poll response body and send chunks to client
-        actix_rt::pin!(body);
+        let mut body = pin!(body);

+        // poll response body and send chunks to client
         while let Some(res) = poll_fn(|cx| body.as_mut().poll_next(cx)).await {
             let mut chunk = res.map_err(|err| DispatchError::ResponseBody(err.into()))?;

@@ -141,7 +141,7 @@ mod openssl {
 }

 #[cfg(feature = "rustls-0_20")]
-mod rustls_020 {
+mod rustls_0_20 {
     use std::io;

     use actix_service::ServiceFactoryExt as _;

@@ -192,7 +192,7 @@ mod rustls_020 {
 }

 #[cfg(feature = "rustls-0_21")]
-mod rustls_021 {
+mod rustls_0_21 {
     use std::io;

     use actix_service::ServiceFactoryExt as _;

@@ -242,6 +242,108 @@ mod rustls_021 {
     }
 }
#[cfg(feature = "rustls-0_22")]
mod rustls_0_22 {
use std::io;
use actix_service::ServiceFactoryExt as _;
use actix_tls::accept::{
rustls_0_22::{reexports::ServerConfig, Acceptor, TlsStream},
TlsError,
};
use super::*;
impl<S, B> H2Service<TlsStream<TcpStream>, S, B>
where
S: ServiceFactory<Request, Config = ()>,
S::Future: 'static,
S::Error: Into<Response<BoxBody>> + 'static,
S::Response: Into<Response<B>> + 'static,
<S::Service as Service<Request>>::Future: 'static,
B: MessageBody + 'static,
{
/// Create Rustls v0.22 based service.
pub fn rustls_0_22(
self,
mut config: ServerConfig,
) -> impl ServiceFactory<
TcpStream,
Config = (),
Response = (),
Error = TlsError<io::Error, DispatchError>,
InitError = S::InitError,
> {
let mut protos = vec![b"h2".to_vec()];
protos.extend_from_slice(&config.alpn_protocols);
config.alpn_protocols = protos;
Acceptor::new(config)
.map_init_err(|_| {
unreachable!("TLS acceptor service factory does not error on init")
})
.map_err(TlsError::into_service_error)
.map(|io: TlsStream<TcpStream>| {
let peer_addr = io.get_ref().0.peer_addr().ok();
(io, peer_addr)
})
.and_then(self.map_err(TlsError::Service))
}
}
}
#[cfg(feature = "rustls-0_23")]
mod rustls_0_23 {
use std::io;
use actix_service::ServiceFactoryExt as _;
use actix_tls::accept::{
rustls_0_23::{reexports::ServerConfig, Acceptor, TlsStream},
TlsError,
};
use super::*;
impl<S, B> H2Service<TlsStream<TcpStream>, S, B>
where
S: ServiceFactory<Request, Config = ()>,
S::Future: 'static,
S::Error: Into<Response<BoxBody>> + 'static,
S::Response: Into<Response<B>> + 'static,
<S::Service as Service<Request>>::Future: 'static,
B: MessageBody + 'static,
{
/// Create Rustls v0.23 based service.
pub fn rustls_0_23(
self,
mut config: ServerConfig,
) -> impl ServiceFactory<
TcpStream,
Config = (),
Response = (),
Error = TlsError<io::Error, DispatchError>,
InitError = S::InitError,
> {
let mut protos = vec![b"h2".to_vec()];
protos.extend_from_slice(&config.alpn_protocols);
config.alpn_protocols = protos;
Acceptor::new(config)
.map_init_err(|_| {
unreachable!("TLS acceptor service factory does not error on init")
})
.map_err(TlsError::into_service_error)
.map(|io: TlsStream<TcpStream>| {
let peer_addr = io.get_ref().0.peer_addr().ok();
(io, peer_addr)
})
.and_then(self.map_err(TlsError::Service))
}
}
}
 impl<T, S, B> ServiceFactory<(T, Option<net::SocketAddr>)> for H2Service<T, S, B>
 where
     T: AsyncRead + AsyncWrite + Unpin + 'static,

@@ -332,7 +434,7 @@ where
         H2ServiceHandlerResponse {
             state: State::Handshake(
-                Some(self.flow.clone()),
+                Some(Rc::clone(&self.flow)),
                 Some(self.cfg.clone()),
                 addr,
                 on_connect_data,

View file

@ -13,8 +13,9 @@ use super::AsHeaderName;
/// `HeaderMap` is a "multi-map" of [`HeaderName`] to one or more [`HeaderValue`]s. /// `HeaderMap` is a "multi-map" of [`HeaderName`] to one or more [`HeaderValue`]s.
/// ///
/// # Examples /// # Examples
///
/// ``` /// ```
/// use actix_http::header::{self, HeaderMap, HeaderValue}; /// # use actix_http::header::{self, HeaderMap, HeaderValue};
/// ///
/// let mut map = HeaderMap::new(); /// let mut map = HeaderMap::new();
/// ///
@ -29,6 +30,21 @@ use super::AsHeaderName;
/// ///
/// assert!(!map.contains_key(header::ORIGIN)); /// assert!(!map.contains_key(header::ORIGIN));
/// ``` /// ```
///
/// Construct a header map using the [`FromIterator`] implementation. Note that it uses the append
/// strategy, so duplicate header names are preserved.
///
/// ```
/// use actix_http::header::{self, HeaderMap, HeaderValue};
///
/// let headers = HeaderMap::from_iter([
/// (header::CONTENT_TYPE, HeaderValue::from_static("text/plain")),
/// (header::COOKIE, HeaderValue::from_static("foo=1")),
/// (header::COOKIE, HeaderValue::from_static("bar=1")),
/// ]);
///
/// assert_eq!(headers.len(), 3);
/// ```
#[derive(Debug, Clone, Default)] #[derive(Debug, Clone, Default)]
pub struct HeaderMap { pub struct HeaderMap {
pub(crate) inner: AHashMap<HeaderName, Value>, pub(crate) inner: AHashMap<HeaderName, Value>,
@ -368,8 +384,8 @@ impl HeaderMap {
/// let removed = map.insert(header::ACCEPT, HeaderValue::from_static("text/html")); /// let removed = map.insert(header::ACCEPT, HeaderValue::from_static("text/html"));
/// assert!(!removed.is_empty()); /// assert!(!removed.is_empty());
/// ``` /// ```
pub fn insert(&mut self, key: HeaderName, val: HeaderValue) -> Removed { pub fn insert(&mut self, name: HeaderName, val: HeaderValue) -> Removed {
let value = self.inner.insert(key, Value::one(val)); let value = self.inner.insert(name, Value::one(val));
Removed::new(value) Removed::new(value)
} }
@ -636,6 +652,16 @@ impl<'a> IntoIterator for &'a HeaderMap {
} }
} }
impl FromIterator<(HeaderName, HeaderValue)> for HeaderMap {
fn from_iter<T: IntoIterator<Item = (HeaderName, HeaderValue)>>(iter: T) -> Self {
iter.into_iter()
.fold(Self::new(), |mut map, (name, value)| {
map.append(name, value);
map
})
}
}
/// Convert a `http::HeaderMap` to our `HeaderMap`. /// Convert a `http::HeaderMap` to our `HeaderMap`.
impl From<http::HeaderMap> for HeaderMap { impl From<http::HeaderMap> for HeaderMap {
fn from(mut map: http::HeaderMap) -> Self { fn from(mut map: http::HeaderMap) -> Self {
@ -650,6 +676,13 @@ impl From<HeaderMap> for http::HeaderMap {
} }
} }
/// Convert our `&HeaderMap` to a `http::HeaderMap`.
impl From<&HeaderMap> for http::HeaderMap {
fn from(map: &HeaderMap) -> Self {
map.to_owned().into()
}
}
/// Iterator over removed, owned values with the same associated name. /// Iterator over removed, owned values with the same associated name.
/// ///
/// Returned from methods that remove or replace items. See [`HeaderMap::insert`] /// Returned from methods that remove or replace items. See [`HeaderMap::insert`]

View file

@ -24,8 +24,7 @@ impl FromStr for HttpDate {
impl fmt::Display for HttpDate { impl fmt::Display for HttpDate {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let date_str = httpdate::fmt_http_date(self.0); httpdate::HttpDate::from(self.0).fmt(f)
f.write_str(&date_str)
} }
} }
@ -37,7 +36,7 @@ impl TryIntoHeaderValue for HttpDate {
let mut wrt = MutWriter(&mut buf); let mut wrt = MutWriter(&mut buf);
// unwrap: date output is known to be well formed and of known length // unwrap: date output is known to be well formed and of known length
write!(wrt, "{}", httpdate::fmt_http_date(self.0)).unwrap(); write!(wrt, "{}", self).unwrap();
HeaderValue::from_maybe_shared(buf.split().freeze()) HeaderValue::from_maybe_shared(buf.split().freeze())
} }
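A tiny round-trip sketch of the behavior this hunk relies on; illustration only, assuming `HttpDate` is reachable at `actix_http::header::HttpDate`.

```rust
use actix_http::header::HttpDate;

fn main() {
    // Parse an IMF-fixdate via `FromStr`, then format it back via the `Display` impl,
    // which now delegates to `httpdate` instead of allocating an intermediate String.
    let date: HttpDate = "Sun, 06 Nov 1994 08:49:37 GMT".parse().unwrap();
    assert_eq!(date.to_string(), "Sun, 06 Nov 1994 08:49:37 GMT");
}
```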

View file

@ -80,18 +80,18 @@ mod tests {
#[test] #[test]
fn comma_delimited_parsing() { fn comma_delimited_parsing() {
let headers = vec![]; let headers = [];
let res: Vec<usize> = from_comma_delimited(headers.iter()).unwrap(); let res: Vec<usize> = from_comma_delimited(headers.iter()).unwrap();
assert_eq!(res, vec![0; 0]); assert_eq!(res, vec![0; 0]);
let headers = vec![ let headers = [
HeaderValue::from_static("1, 2"), HeaderValue::from_static("1, 2"),
HeaderValue::from_static("3,4"), HeaderValue::from_static("3,4"),
]; ];
let res: Vec<usize> = from_comma_delimited(headers.iter()).unwrap(); let res: Vec<usize> = from_comma_delimited(headers.iter()).unwrap();
assert_eq!(res, vec![1, 2, 3, 4]); assert_eq!(res, vec![1, 2, 3, 4]);
let headers = vec![ let headers = [
HeaderValue::from_static(""), HeaderValue::from_static(""),
HeaderValue::from_static(","), HeaderValue::from_static(","),
HeaderValue::from_static(" "), HeaderValue::from_static(" "),

View file

@ -6,7 +6,10 @@
//! | ------------------- | ------------------------------------------- | //! | ------------------- | ------------------------------------------- |
//! | `http2` | HTTP/2 support via [h2]. | //! | `http2` | HTTP/2 support via [h2]. |
//! | `openssl` | TLS support via [OpenSSL]. | //! | `openssl` | TLS support via [OpenSSL]. |
//! | `rustls` | TLS support via [rustls]. | //! | `rustls-0_20` | TLS support via rustls 0.20. |
//! | `rustls-0_21` | TLS support via rustls 0.21. |
//! | `rustls-0_22` | TLS support via rustls 0.22. |
//! | `rustls-0_23` | TLS support via [rustls] 0.23. |
//! | `compress-brotli` | Payload compression support: Brotli. | //! | `compress-brotli` | Payload compression support: Brotli. |
//! | `compress-gzip` | Payload compression support: Deflate, Gzip. | //! | `compress-gzip` | Payload compression support: Deflate, Gzip. |
//! | `compress-zstd` | Payload compression support: Zstd. | //! | `compress-zstd` | Payload compression support: Zstd. |
@ -17,8 +20,6 @@
//! [rustls]: https://crates.io/crates/rustls //! [rustls]: https://crates.io/crates/rustls
//! [trust-dns]: https://crates.io/crates/trust-dns //! [trust-dns]: https://crates.io/crates/trust-dns
#![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
#![allow( #![allow(
clippy::type_complexity, clippy::type_complexity,
clippy::too_many_arguments, clippy::too_many_arguments,
@ -28,7 +29,7 @@
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")] #![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))] #![cfg_attr(docsrs, feature(doc_auto_cfg))]
pub use ::http::{uri, uri::Uri, Method, StatusCode, Version}; pub use http::{uri, uri::Uri, Method, StatusCode, Version};
pub mod body; pub mod body;
mod builder; mod builder;
@ -58,7 +59,7 @@ pub mod ws;
#[allow(deprecated)] #[allow(deprecated)]
pub use self::payload::PayloadStream; pub use self::payload::PayloadStream;
#[cfg(any(feature = "openssl", feature = "rustls-0_20", feature = "rustls-0_21"))] #[cfg(feature = "__tls")]
pub use self::service::TlsAcceptorConfig; pub use self::service::TlsAcceptorConfig;
pub use self::{ pub use self::{
builder::HttpServiceBuilder, builder::HttpServiceBuilder,

View file

@ -66,7 +66,7 @@ impl<T: Head> ops::DerefMut for Message<T> {
impl<T: Head> Drop for Message<T> { impl<T: Head> Drop for Message<T> {
fn drop(&mut self) { fn drop(&mut self) {
T::with_pool(|p| p.release(self.head.clone())) T::with_pool(|p| p.release(Rc::clone(&self.head)))
} }
} }

View file

@ -5,7 +5,7 @@
use std::cell::RefCell; use std::cell::RefCell;
thread_local! { thread_local! {
static NOTIFY_DROPPED: RefCell<Option<bool>> = RefCell::new(None); static NOTIFY_DROPPED: RefCell<Option<bool>> = const { RefCell::new(None) };
} }
/// Check if the spawned task is dropped. /// Check if the spawned task is dropped.

View file

@ -16,7 +16,10 @@ pub struct RequestHead {
pub uri: Uri, pub uri: Uri,
pub version: Version, pub version: Version,
pub headers: HeaderMap, pub headers: HeaderMap,
/// Will only be None when called in unit tests unless set manually.
pub peer_addr: Option<net::SocketAddr>, pub peer_addr: Option<net::SocketAddr>,
flags: Flags, flags: Flags,
} }

View file

@ -173,7 +173,7 @@ impl<P> Request<P> {
/// Peer address is the directly connected peer's socket address. If a proxy is used in front of /// Peer address is the directly connected peer's socket address. If a proxy is used in front of
/// the Actix Web server, then it would be address of this proxy. /// the Actix Web server, then it would be address of this proxy.
/// ///
/// Will only return None when called in unit tests. /// Will only return None when called in unit tests unless set manually.
#[inline] #[inline]
pub fn peer_addr(&self) -> Option<net::SocketAddr> { pub fn peer_addr(&self) -> Option<net::SocketAddr> {
self.head().peer_addr self.head().peer_addr

View file

@ -351,12 +351,9 @@ mod tests {
assert_eq!(resp.headers().get(CONTENT_TYPE).unwrap(), "text/plain"); assert_eq!(resp.headers().get(CONTENT_TYPE).unwrap(), "text/plain");
let resp = Response::build(StatusCode::OK) let resp = Response::build(StatusCode::OK)
.content_type(mime::APPLICATION_JAVASCRIPT_UTF_8) .content_type(mime::TEXT_JAVASCRIPT)
.body(Bytes::new()); .body(Bytes::new());
assert_eq!( assert_eq!(resp.headers().get(CONTENT_TYPE).unwrap(), "text/javascript");
resp.headers().get(CONTENT_TYPE).unwrap(),
"application/javascript; charset=utf-8"
);
} }
#[test] #[test]

View file

@ -241,13 +241,13 @@ where
} }
/// Configuration options used when accepting TLS connection. /// Configuration options used when accepting TLS connection.
#[cfg(any(feature = "openssl", feature = "rustls-0_20", feature = "rustls-0_21"))] #[cfg(feature = "__tls")]
#[derive(Debug, Default)] #[derive(Debug, Default)]
pub struct TlsAcceptorConfig { pub struct TlsAcceptorConfig {
pub(crate) handshake_timeout: Option<std::time::Duration>, pub(crate) handshake_timeout: Option<std::time::Duration>,
} }
#[cfg(any(feature = "openssl", feature = "rustls-0_20", feature = "rustls-0_21"))] #[cfg(feature = "__tls")]
impl TlsAcceptorConfig { impl TlsAcceptorConfig {
/// Set TLS handshake timeout duration. /// Set TLS handshake timeout duration.
pub fn handshake_timeout(self, dur: std::time::Duration) -> Self { pub fn handshake_timeout(self, dur: std::time::Duration) -> Self {
@ -353,12 +353,12 @@ mod openssl {
} }
#[cfg(feature = "rustls-0_20")] #[cfg(feature = "rustls-0_20")]
mod rustls_020 { mod rustls_0_20 {
use std::io; use std::io;
use actix_service::ServiceFactoryExt as _; use actix_service::ServiceFactoryExt as _;
use actix_tls::accept::{ use actix_tls::accept::{
rustls::{reexports::ServerConfig, Acceptor, TlsStream}, rustls_0_20::{reexports::ServerConfig, Acceptor, TlsStream},
TlsError, TlsError,
}; };
@ -389,7 +389,7 @@ mod rustls_020 {
U::Error: fmt::Display + Into<Response<BoxBody>>, U::Error: fmt::Display + Into<Response<BoxBody>>,
U::InitError: fmt::Debug, U::InitError: fmt::Debug,
{ {
/// Create Rustls based service. /// Create Rustls v0.20 based service.
pub fn rustls( pub fn rustls(
self, self,
config: ServerConfig, config: ServerConfig,
@ -403,7 +403,7 @@ mod rustls_020 {
self.rustls_with_config(config, TlsAcceptorConfig::default()) self.rustls_with_config(config, TlsAcceptorConfig::default())
} }
/// Create Rustls based service with custom TLS acceptor configuration. /// Create Rustls v0.20 based service with custom TLS acceptor configuration.
pub fn rustls_with_config( pub fn rustls_with_config(
self, self,
mut config: ServerConfig, mut config: ServerConfig,
@ -449,7 +449,7 @@ mod rustls_020 {
} }
#[cfg(feature = "rustls-0_21")] #[cfg(feature = "rustls-0_21")]
mod rustls_021 { mod rustls_0_21 {
use std::io; use std::io;
use actix_service::ServiceFactoryExt as _; use actix_service::ServiceFactoryExt as _;
@ -485,7 +485,7 @@ mod rustls_021 {
U::Error: fmt::Display + Into<Response<BoxBody>>, U::Error: fmt::Display + Into<Response<BoxBody>>,
U::InitError: fmt::Debug, U::InitError: fmt::Debug,
{ {
/// Create Rustls based service. /// Create Rustls v0.21 based service.
pub fn rustls_021( pub fn rustls_021(
self, self,
config: ServerConfig, config: ServerConfig,
@ -499,7 +499,7 @@ mod rustls_021 {
self.rustls_021_with_config(config, TlsAcceptorConfig::default()) self.rustls_021_with_config(config, TlsAcceptorConfig::default())
} }
/// Create Rustls based service with custom TLS acceptor configuration. /// Create Rustls v0.21 based service with custom TLS acceptor configuration.
pub fn rustls_021_with_config( pub fn rustls_021_with_config(
self, self,
mut config: ServerConfig, mut config: ServerConfig,
@ -544,6 +544,198 @@ mod rustls_021 {
} }
} }
#[cfg(feature = "rustls-0_22")]
mod rustls_0_22 {
use std::io;
use actix_service::ServiceFactoryExt as _;
use actix_tls::accept::{
rustls_0_22::{reexports::ServerConfig, Acceptor, TlsStream},
TlsError,
};
use super::*;
impl<S, B, X, U> HttpService<TlsStream<TcpStream>, S, B, X, U>
where
S: ServiceFactory<Request, Config = ()>,
S::Future: 'static,
S::Error: Into<Response<BoxBody>> + 'static,
S::InitError: fmt::Debug,
S::Response: Into<Response<B>> + 'static,
<S::Service as Service<Request>>::Future: 'static,
B: MessageBody + 'static,
X: ServiceFactory<Request, Config = (), Response = Request>,
X::Future: 'static,
X::Error: Into<Response<BoxBody>>,
X::InitError: fmt::Debug,
U: ServiceFactory<
(Request, Framed<TlsStream<TcpStream>, h1::Codec>),
Config = (),
Response = (),
>,
U::Future: 'static,
U::Error: fmt::Display + Into<Response<BoxBody>>,
U::InitError: fmt::Debug,
{
/// Create Rustls v0.22 based service.
pub fn rustls_0_22(
self,
config: ServerConfig,
) -> impl ServiceFactory<
TcpStream,
Config = (),
Response = (),
Error = TlsError<io::Error, DispatchError>,
InitError = (),
> {
self.rustls_0_22_with_config(config, TlsAcceptorConfig::default())
}
/// Create Rustls v0.22 based service with custom TLS acceptor configuration.
pub fn rustls_0_22_with_config(
self,
mut config: ServerConfig,
tls_acceptor_config: TlsAcceptorConfig,
) -> impl ServiceFactory<
TcpStream,
Config = (),
Response = (),
Error = TlsError<io::Error, DispatchError>,
InitError = (),
> {
let mut protos = vec![b"h2".to_vec(), b"http/1.1".to_vec()];
protos.extend_from_slice(&config.alpn_protocols);
config.alpn_protocols = protos;
let mut acceptor = Acceptor::new(config);
if let Some(handshake_timeout) = tls_acceptor_config.handshake_timeout {
acceptor.set_handshake_timeout(handshake_timeout);
}
acceptor
.map_init_err(|_| {
unreachable!("TLS acceptor service factory does not error on init")
})
.map_err(TlsError::into_service_error)
.and_then(|io: TlsStream<TcpStream>| async {
let proto = if let Some(protos) = io.get_ref().1.alpn_protocol() {
if protos.windows(2).any(|window| window == b"h2") {
Protocol::Http2
} else {
Protocol::Http1
}
} else {
Protocol::Http1
};
let peer_addr = io.get_ref().0.peer_addr().ok();
Ok((io, proto, peer_addr))
})
.and_then(self.map_err(TlsError::Service))
}
}
}
#[cfg(feature = "rustls-0_23")]
mod rustls_0_23 {
use std::io;
use actix_service::ServiceFactoryExt as _;
use actix_tls::accept::{
rustls_0_23::{reexports::ServerConfig, Acceptor, TlsStream},
TlsError,
};
use super::*;
impl<S, B, X, U> HttpService<TlsStream<TcpStream>, S, B, X, U>
where
S: ServiceFactory<Request, Config = ()>,
S::Future: 'static,
S::Error: Into<Response<BoxBody>> + 'static,
S::InitError: fmt::Debug,
S::Response: Into<Response<B>> + 'static,
<S::Service as Service<Request>>::Future: 'static,
B: MessageBody + 'static,
X: ServiceFactory<Request, Config = (), Response = Request>,
X::Future: 'static,
X::Error: Into<Response<BoxBody>>,
X::InitError: fmt::Debug,
U: ServiceFactory<
(Request, Framed<TlsStream<TcpStream>, h1::Codec>),
Config = (),
Response = (),
>,
U::Future: 'static,
U::Error: fmt::Display + Into<Response<BoxBody>>,
U::InitError: fmt::Debug,
{
/// Create Rustls v0.23 based service.
pub fn rustls_0_23(
self,
config: ServerConfig,
) -> impl ServiceFactory<
TcpStream,
Config = (),
Response = (),
Error = TlsError<io::Error, DispatchError>,
InitError = (),
> {
self.rustls_0_23_with_config(config, TlsAcceptorConfig::default())
}
/// Create Rustls v0.23 based service with custom TLS acceptor configuration.
pub fn rustls_0_23_with_config(
self,
mut config: ServerConfig,
tls_acceptor_config: TlsAcceptorConfig,
) -> impl ServiceFactory<
TcpStream,
Config = (),
Response = (),
Error = TlsError<io::Error, DispatchError>,
InitError = (),
> {
let mut protos = vec![b"h2".to_vec(), b"http/1.1".to_vec()];
protos.extend_from_slice(&config.alpn_protocols);
config.alpn_protocols = protos;
let mut acceptor = Acceptor::new(config);
if let Some(handshake_timeout) = tls_acceptor_config.handshake_timeout {
acceptor.set_handshake_timeout(handshake_timeout);
}
acceptor
.map_init_err(|_| {
unreachable!("TLS acceptor service factory does not error on init")
})
.map_err(TlsError::into_service_error)
.and_then(|io: TlsStream<TcpStream>| async {
let proto = if let Some(protos) = io.get_ref().1.alpn_protocol() {
if protos.windows(2).any(|window| window == b"h2") {
Protocol::Http2
} else {
Protocol::Http1
}
} else {
Protocol::Http1
};
let peer_addr = io.get_ref().0.peer_addr().ok();
Ok((io, proto, peer_addr))
})
.and_then(self.map_err(TlsError::Service))
}
}
}
impl<T, S, B, X, U> ServiceFactory<(T, Protocol, Option<net::SocketAddr>)> impl<T, S, B, X, U> ServiceFactory<(T, Protocol, Option<net::SocketAddr>)>
for HttpService<T, S, B, X, U> for HttpService<T, S, B, X, U>
where where
@ -718,7 +910,7 @@ where
handshake: Some(( handshake: Some((
crate::h2::handshake_with_timeout(io, &self.cfg), crate::h2::handshake_with_timeout(io, &self.cfg),
self.cfg.clone(), self.cfg.clone(),
self.flow.clone(), Rc::clone(&self.flow),
conn_data, conn_data,
peer_addr, peer_addr,
)), )),
@ -734,7 +926,7 @@ where
state: State::H1 { state: State::H1 {
dispatcher: h1::Dispatcher::new( dispatcher: h1::Dispatcher::new(
io, io,
self.flow.clone(), Rc::clone(&self.flow),
self.cfg.clone(), self.cfg.clone(),
peer_addr, peer_addr,
conn_data, conn_data,

View file

@ -159,8 +159,8 @@ impl TestBuffer {
#[allow(dead_code)] #[allow(dead_code)]
pub(crate) fn clone(&self) -> Self { pub(crate) fn clone(&self) -> Self {
Self { Self {
read_buf: self.read_buf.clone(), read_buf: Rc::clone(&self.read_buf),
write_buf: self.write_buf.clone(), write_buf: Rc::clone(&self.write_buf),
err: self.err.clone(), err: self.err.clone(),
} }
} }

View file

@ -178,14 +178,14 @@ impl Parser {
}; };
if payload_len < 126 { if payload_len < 126 {
dst.reserve(p_len + 2 + if mask { 4 } else { 0 }); dst.reserve(p_len + 2);
dst.put_slice(&[one, two | payload_len as u8]); dst.put_slice(&[one, two | payload_len as u8]);
} else if payload_len <= 65_535 { } else if payload_len <= 65_535 {
dst.reserve(p_len + 4 + if mask { 4 } else { 0 }); dst.reserve(p_len + 4);
dst.put_slice(&[one, two | 126]); dst.put_slice(&[one, two | 126]);
dst.put_u16(payload_len as u16); dst.put_u16(payload_len as u16);
} else { } else {
dst.reserve(p_len + 10 + if mask { 4 } else { 0 }); dst.reserve(p_len + 10);
dst.put_slice(&[one, two | 127]); dst.put_slice(&[one, two | 127]);
dst.put_u64(payload_len as u64); dst.put_u64(payload_len as u64);
}; };

View file

@ -221,7 +221,7 @@ pub fn handshake_response(req: &RequestHead) -> ResponseBuilder {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use crate::{header, test::TestRequest, Method}; use crate::{header, test::TestRequest};
#[test] #[test]
fn test_handshake() { fn test_handshake() {

View file

@ -1,7 +1,4 @@
use std::{ use std::fmt;
convert::{From, Into},
fmt,
};
use base64::prelude::*; use base64::prelude::*;
use tracing::error; use tracing::error;

View file

@ -42,9 +42,11 @@ where
} }
fn tls_config() -> SslAcceptor { fn tls_config() -> SslAcceptor {
let cert = rcgen::generate_simple_self_signed(vec!["localhost".to_owned()]).unwrap(); let rcgen::CertifiedKey { cert, key_pair } =
let cert_file = cert.serialize_pem().unwrap(); rcgen::generate_simple_self_signed(["localhost".to_owned()]).unwrap();
let key_file = cert.serialize_private_key_pem(); let cert_file = cert.pem();
let key_file = key_pair.serialize_pem();
let cert = X509::from_pem(cert_file.as_bytes()).unwrap(); let cert = X509::from_pem(cert_file.as_bytes()).unwrap();
let key = PKey::private_key_from_pem(key_file.as_bytes()).unwrap(); let key = PKey::private_key_from_pem(key_file.as_bytes()).unwrap();

View file

@ -1,6 +1,6 @@
#![cfg(feature = "rustls-0_21")] #![cfg(feature = "rustls-0_23")]
extern crate tls_rustls_021 as rustls; extern crate tls_rustls_023 as rustls;
use std::{ use std::{
convert::Infallible, convert::Infallible,
@ -20,13 +20,13 @@ use actix_http::{
use actix_http_test::test_server; use actix_http_test::test_server;
use actix_rt::pin; use actix_rt::pin;
use actix_service::{fn_factory_with_config, fn_service}; use actix_service::{fn_factory_with_config, fn_service};
use actix_tls::connect::rustls_0_21::webpki_roots_cert_store; use actix_tls::connect::rustls_0_23::webpki_roots_cert_store;
use actix_utils::future::{err, ok, poll_fn}; use actix_utils::future::{err, ok, poll_fn};
use bytes::{Bytes, BytesMut}; use bytes::{Bytes, BytesMut};
use derive_more::{Display, Error}; use derive_more::{Display, Error};
use futures_core::{ready, Stream}; use futures_core::{ready, Stream};
use futures_util::stream::once; use futures_util::stream::once;
use rustls::{Certificate, PrivateKey, ServerConfig as RustlsServerConfig, ServerName}; use rustls::{pki_types::ServerName, ServerConfig as RustlsServerConfig};
use rustls_pemfile::{certs, pkcs8_private_keys}; use rustls_pemfile::{certs, pkcs8_private_keys};
async fn load_body<S>(stream: S) -> Result<BytesMut, PayloadError> async fn load_body<S>(stream: S) -> Result<BytesMut, PayloadError>
@ -52,24 +52,25 @@ where
} }
fn tls_config() -> RustlsServerConfig { fn tls_config() -> RustlsServerConfig {
let cert = rcgen::generate_simple_self_signed(vec!["localhost".to_owned()]).unwrap(); let rcgen::CertifiedKey { cert, key_pair } =
let cert_file = cert.serialize_pem().unwrap(); rcgen::generate_simple_self_signed(["localhost".to_owned()]).unwrap();
let key_file = cert.serialize_private_key_pem(); let cert_file = cert.pem();
let key_file = key_pair.serialize_pem();
let cert_file = &mut BufReader::new(cert_file.as_bytes()); let cert_file = &mut BufReader::new(cert_file.as_bytes());
let key_file = &mut BufReader::new(key_file.as_bytes()); let key_file = &mut BufReader::new(key_file.as_bytes());
let cert_chain = certs(cert_file) let cert_chain = certs(cert_file).collect::<Result<Vec<_>, _>>().unwrap();
.unwrap() let mut keys = pkcs8_private_keys(key_file)
.into_iter() .collect::<Result<Vec<_>, _>>()
.map(Certificate) .unwrap();
.collect();
let mut keys = pkcs8_private_keys(key_file).unwrap();
let mut config = RustlsServerConfig::builder() let mut config = RustlsServerConfig::builder()
.with_safe_defaults()
.with_no_client_auth() .with_no_client_auth()
.with_single_cert(cert_chain, PrivateKey(keys.remove(0))) .with_single_cert(
cert_chain,
rustls::pki_types::PrivateKeyDer::Pkcs8(keys.remove(0)),
)
.unwrap(); .unwrap();
config.alpn_protocols.push(HTTP1_1_ALPN_PROTOCOL.to_vec()); config.alpn_protocols.push(HTTP1_1_ALPN_PROTOCOL.to_vec());
@ -83,7 +84,6 @@ pub fn get_negotiated_alpn_protocol(
client_alpn_protocol: &[u8], client_alpn_protocol: &[u8],
) -> Option<Vec<u8>> { ) -> Option<Vec<u8>> {
let mut config = rustls::ClientConfig::builder() let mut config = rustls::ClientConfig::builder()
.with_safe_defaults()
.with_root_certificates(webpki_roots_cert_store()) .with_root_certificates(webpki_roots_cert_store())
.with_no_client_auth(); .with_no_client_auth();
@ -109,7 +109,7 @@ async fn h1() -> io::Result<()> {
let srv = test_server(move || { let srv = test_server(move || {
HttpService::build() HttpService::build()
.h1(|_| ok::<_, Error>(Response::ok())) .h1(|_| ok::<_, Error>(Response::ok()))
.rustls_021(tls_config()) .rustls_0_23(tls_config())
}) })
.await; .await;
@ -123,7 +123,7 @@ async fn h2() -> io::Result<()> {
let srv = test_server(move || { let srv = test_server(move || {
HttpService::build() HttpService::build()
.h2(|_| ok::<_, Error>(Response::ok())) .h2(|_| ok::<_, Error>(Response::ok()))
.rustls_021(tls_config()) .rustls_0_23(tls_config())
}) })
.await; .await;
@ -141,7 +141,7 @@ async fn h1_1() -> io::Result<()> {
assert_eq!(req.version(), Version::HTTP_11); assert_eq!(req.version(), Version::HTTP_11);
ok::<_, Error>(Response::ok()) ok::<_, Error>(Response::ok())
}) })
.rustls_021(tls_config()) .rustls_0_23(tls_config())
}) })
.await; .await;
@ -159,7 +159,7 @@ async fn h2_1() -> io::Result<()> {
assert_eq!(req.version(), Version::HTTP_2); assert_eq!(req.version(), Version::HTTP_2);
ok::<_, Error>(Response::ok()) ok::<_, Error>(Response::ok())
}) })
.rustls_021_with_config( .rustls_0_23_with_config(
tls_config(), tls_config(),
TlsAcceptorConfig::default().handshake_timeout(Duration::from_secs(5)), TlsAcceptorConfig::default().handshake_timeout(Duration::from_secs(5)),
) )
@ -180,7 +180,7 @@ async fn h2_body1() -> io::Result<()> {
let body = load_body(req.take_payload()).await?; let body = load_body(req.take_payload()).await?;
Ok::<_, Error>(Response::ok().set_body(body)) Ok::<_, Error>(Response::ok().set_body(body))
}) })
.rustls_021(tls_config()) .rustls_0_23(tls_config())
}) })
.await; .await;
@ -206,7 +206,7 @@ async fn h2_content_length() {
]; ];
ok::<_, Infallible>(Response::new(statuses[indx])) ok::<_, Infallible>(Response::new(statuses[indx]))
}) })
.rustls_021(tls_config()) .rustls_0_23(tls_config())
}) })
.await; .await;
@ -278,7 +278,7 @@ async fn h2_headers() {
} }
ok::<_, Infallible>(config.body(data.clone())) ok::<_, Infallible>(config.body(data.clone()))
}) })
.rustls_021(tls_config()) .rustls_0_23(tls_config())
}) })
.await; .await;
@ -317,7 +317,7 @@ async fn h2_body2() {
let mut srv = test_server(move || { let mut srv = test_server(move || {
HttpService::build() HttpService::build()
.h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR))) .h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
.rustls_021(tls_config()) .rustls_0_23(tls_config())
}) })
.await; .await;
@ -334,7 +334,7 @@ async fn h2_head_empty() {
let mut srv = test_server(move || { let mut srv = test_server(move || {
HttpService::build() HttpService::build()
.finish(|_| ok::<_, Infallible>(Response::ok().set_body(STR))) .finish(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
.rustls_021(tls_config()) .rustls_0_23(tls_config())
}) })
.await; .await;
@ -360,7 +360,7 @@ async fn h2_head_binary() {
let mut srv = test_server(move || { let mut srv = test_server(move || {
HttpService::build() HttpService::build()
.h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR))) .h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
.rustls_021(tls_config()) .rustls_0_23(tls_config())
}) })
.await; .await;
@ -385,7 +385,7 @@ async fn h2_head_binary2() {
let srv = test_server(move || { let srv = test_server(move || {
HttpService::build() HttpService::build()
.h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR))) .h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
.rustls_021(tls_config()) .rustls_0_23(tls_config())
}) })
.await; .await;
@ -411,7 +411,7 @@ async fn h2_body_length() {
Response::ok().set_body(SizedStream::new(STR.len() as u64, body)), Response::ok().set_body(SizedStream::new(STR.len() as u64, body)),
) )
}) })
.rustls_021(tls_config()) .rustls_0_23(tls_config())
}) })
.await; .await;
@ -435,7 +435,7 @@ async fn h2_body_chunked_explicit() {
.body(BodyStream::new(body)), .body(BodyStream::new(body)),
) )
}) })
.rustls_021(tls_config()) .rustls_0_23(tls_config())
}) })
.await; .await;
@ -464,7 +464,7 @@ async fn h2_response_http_error_handling() {
) )
})) }))
})) }))
.rustls_021(tls_config()) .rustls_0_23(tls_config())
}) })
.await; .await;
@ -494,7 +494,7 @@ async fn h2_service_error() {
let mut srv = test_server(move || { let mut srv = test_server(move || {
HttpService::build() HttpService::build()
.h2(|_| err::<Response<BoxBody>, _>(BadRequest)) .h2(|_| err::<Response<BoxBody>, _>(BadRequest))
.rustls_021(tls_config()) .rustls_0_23(tls_config())
}) })
.await; .await;
@ -511,7 +511,7 @@ async fn h1_service_error() {
let mut srv = test_server(move || { let mut srv = test_server(move || {
HttpService::build() HttpService::build()
.h1(|_| err::<Response<BoxBody>, _>(BadRequest)) .h1(|_| err::<Response<BoxBody>, _>(BadRequest))
.rustls_021(tls_config()) .rustls_0_23(tls_config())
}) })
.await; .await;
@ -534,7 +534,7 @@ async fn alpn_h1() -> io::Result<()> {
config.alpn_protocols.push(CUSTOM_ALPN_PROTOCOL.to_vec()); config.alpn_protocols.push(CUSTOM_ALPN_PROTOCOL.to_vec());
HttpService::build() HttpService::build()
.h1(|_| ok::<_, Error>(Response::ok())) .h1(|_| ok::<_, Error>(Response::ok()))
.rustls_021(config) .rustls_0_23(config)
}) })
.await; .await;
@ -556,7 +556,7 @@ async fn alpn_h2() -> io::Result<()> {
config.alpn_protocols.push(CUSTOM_ALPN_PROTOCOL.to_vec()); config.alpn_protocols.push(CUSTOM_ALPN_PROTOCOL.to_vec());
HttpService::build() HttpService::build()
.h2(|_| ok::<_, Error>(Response::ok())) .h2(|_| ok::<_, Error>(Response::ok()))
.rustls_021(config) .rustls_0_23(config)
}) })
.await; .await;
@ -582,7 +582,7 @@ async fn alpn_h2_1() -> io::Result<()> {
config.alpn_protocols.push(CUSTOM_ALPN_PROTOCOL.to_vec()); config.alpn_protocols.push(CUSTOM_ALPN_PROTOCOL.to_vec());
HttpService::build() HttpService::build()
.finish(|_| ok::<_, Error>(Response::ok())) .finish(|_| ok::<_, Error>(Response::ok()))
.rustls_021(config) .rustls_0_23(config)
}) })
.await; .await;

View file

@ -2,6 +2,10 @@
## Unreleased ## Unreleased
## 0.7.0
- Minimum supported Rust version (MSRV) is now 1.72.
## 0.6.1 ## 0.6.1
- Update `syn` dependency to `2`. - Update `syn` dependency to `2`.

View file

@ -1,13 +1,14 @@
[package] [package]
name = "actix-multipart-derive" name = "actix-multipart-derive"
version = "0.6.1" version = "0.7.0"
authors = ["Jacob Halsey <jacob@jhalsey.com>"] authors = ["Jacob Halsey <jacob@jhalsey.com>"]
description = "Multipart form derive macro for Actix Web" description = "Multipart form derive macro for Actix Web"
keywords = ["http", "web", "framework", "async", "futures"] keywords = ["http", "web", "framework", "async", "futures"]
homepage = "https://actix.rs" homepage.workspace = true
repository = "https://github.com/actix/actix-web" repository.workspace = true
license = "MIT OR Apache-2.0" license.workspace = true
edition = "2021" edition.workspace = true
rust-version.workspace = true
[package.metadata.docs.rs] [package.metadata.docs.rs]
rustdoc-args = ["--cfg", "docsrs"] rustdoc-args = ["--cfg", "docsrs"]
@ -24,7 +25,10 @@ quote = "1"
syn = "2" syn = "2"
[dev-dependencies] [dev-dependencies]
actix-multipart = "0.6" actix-multipart = "0.7"
actix-web = "4" actix-web = "4"
rustversion = "1" rustversion = "1"
trybuild = "1" trybuild = "1"
[lints]
workspace = true

View file

@ -1,17 +1,16 @@
# actix-multipart-derive # `actix-multipart-derive`
> The derive macro implementation for actix-multipart-derive. > The derive macro implementation for actix-multipart-derive.
<!-- prettier-ignore-start -->
[![crates.io](https://img.shields.io/crates/v/actix-multipart-derive?label=latest)](https://crates.io/crates/actix-multipart-derive) [![crates.io](https://img.shields.io/crates/v/actix-multipart-derive?label=latest)](https://crates.io/crates/actix-multipart-derive)
[![Documentation](https://docs.rs/actix-multipart-derive/badge.svg?version=0.6.1)](https://docs.rs/actix-multipart-derive/0.6.1) [![Documentation](https://docs.rs/actix-multipart-derive/badge.svg?version=0.7.0)](https://docs.rs/actix-multipart-derive/0.7.0)
![Version](https://img.shields.io/badge/rustc-1.68+-ab6000.svg) ![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-multipart-derive.svg) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-multipart-derive.svg)
<br /> <br />
[![dependency status](https://deps.rs/crate/actix-multipart-derive/0.6.1/status.svg)](https://deps.rs/crate/actix-multipart-derive/0.6.1) [![dependency status](https://deps.rs/crate/actix-multipart-derive/0.7.0/status.svg)](https://deps.rs/crate/actix-multipart-derive/0.7.0)
[![Download](https://img.shields.io/crates/d/actix-multipart-derive.svg)](https://crates.io/crates/actix-multipart-derive) [![Download](https://img.shields.io/crates/d/actix-multipart-derive.svg)](https://crates.io/crates/actix-multipart-derive)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
## Documentation & Resources <!-- prettier-ignore-end -->
- [API Documentation](https://docs.rs/actix-multipart-derive)
- Minimum Supported Rust Version (MSRV): 1.68

View file

@ -2,8 +2,6 @@
//! //!
//! See [`macro@MultipartForm`] for usage examples. //! See [`macro@MultipartForm`] for usage examples.
#![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
#![doc(html_logo_url = "https://actix.rs/img/logo.png")] #![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")] #![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))] #![cfg_attr(docsrs, feature(doc_auto_cfg))]
@ -138,7 +136,7 @@ struct ParsedField<'t> {
/// `#[multipart(duplicate_field = "<behavior>")]` attribute: /// `#[multipart(duplicate_field = "<behavior>")]` attribute:
/// ///
/// - "ignore": (default) Extra fields are ignored. I.e., the first one is persisted. /// - "ignore": (default) Extra fields are ignored. I.e., the first one is persisted.
/// - "deny": A `MultipartError::UnsupportedField` error response is returned. /// - "deny": A `MultipartError::UnknownField` error response is returned.
/// - "replace": Each field is processed, but only the last one is persisted. /// - "replace": Each field is processed, but only the last one is persisted.
/// ///
/// Note that `Vec` fields will ignore this option. /// Note that `Vec` fields will ignore this option.
@ -229,7 +227,7 @@ pub fn impl_multipart_form(input: proc_macro::TokenStream) -> proc_macro::TokenS
// Return value when a field name is not supported by the form // Return value when a field name is not supported by the form
let unknown_field_result = if attrs.deny_unknown_fields { let unknown_field_result = if attrs.deny_unknown_fields {
quote!(::std::result::Result::Err( quote!(::std::result::Result::Err(
::actix_multipart::MultipartError::UnsupportedField(field.name().to_string()) ::actix_multipart::MultipartError::UnknownField(field.name().unwrap().to_string())
)) ))
} else { } else {
quote!(::std::result::Result::Ok(())) quote!(::std::result::Result::Ok(()))
@ -292,7 +290,7 @@ pub fn impl_multipart_form(input: proc_macro::TokenStream) -> proc_macro::TokenS
limits: &'t mut ::actix_multipart::form::Limits, limits: &'t mut ::actix_multipart::form::Limits,
state: &'t mut ::actix_multipart::form::State, state: &'t mut ::actix_multipart::form::State,
) -> ::std::pin::Pin<::std::boxed::Box<dyn ::std::future::Future<Output = ::std::result::Result<(), ::actix_multipart::MultipartError>> + 't>> { ) -> ::std::pin::Pin<::std::boxed::Box<dyn ::std::future::Future<Output = ::std::result::Result<(), ::actix_multipart::MultipartError>> + 't>> {
match field.name() { match field.name().unwrap() {
#handle_field_impl #handle_field_impl
_ => return ::std::boxed::Box::pin(::std::future::ready(#unknown_field_result)), _ => return ::std::boxed::Box::pin(::std::future::ready(#unknown_field_result)),
} }

View file

@ -1,4 +1,4 @@
#[rustversion::stable(1.68)] // MSRV #[rustversion::stable(1.72)] // MSRV
#[test] #[test]
fn compile_macros() { fn compile_macros() {
let t = trybuild::TestCases::new(); let t = trybuild::TestCases::new();

View file

@ -2,6 +2,31 @@
## Unreleased ## Unreleased
## 0.7.2
- Fix re-exported version of `actix-multipart-derive`.
## 0.7.1
- Expose `LimitExceeded` error type.
## 0.7.0
- Add `MultipartError::ContentTypeIncompatible` variant.
- Add `MultipartError::ContentDispositionNameMissing` variant.
- Add `Field::bytes()` method.
- Rename `MultipartError::{NoContentDisposition => ContentDispositionMissing}` variant.
- Rename `MultipartError::{NoContentType => ContentTypeMissing}` variant.
- Rename `MultipartError::{ParseContentType => ContentTypeParse}` variant.
- Rename `MultipartError::{Boundary => BoundaryMissing}` variant.
- Rename `MultipartError::{UnsupportedField => UnknownField}` variant.
- Remove top-level re-exports of `test` utilities.
## 0.6.2
- Add testing utilities under new module `test`.
- Minimum supported Rust version (MSRV) is now 1.72.
## 0.6.1 ## 0.6.1
- Minimum supported Rust version (MSRV) is now 1.68 due to transitive `time` dependency. - Minimum supported Rust version (MSRV) is now 1.68 due to transitive `time` dependency.

View file

@ -1,32 +1,47 @@
[package] [package]
name = "actix-multipart" name = "actix-multipart"
version = "0.6.1" version = "0.7.2"
authors = [ authors = [
"Nikolay Kim <fafhrd91@gmail.com>", "Nikolay Kim <fafhrd91@gmail.com>",
"Jacob Halsey <jacob@jhalsey.com>", "Jacob Halsey <jacob@jhalsey.com>",
"Rob Ede <robjtede@icloud.com>",
] ]
description = "Multipart form support for Actix Web" description = "Multipart request & form support for Actix Web"
keywords = ["http", "web", "framework", "async", "futures"] keywords = ["http", "actix", "web", "multipart", "form"]
homepage = "https://actix.rs" homepage.workspace = true
repository = "https://github.com/actix/actix-web" repository.workspace = true
license = "MIT OR Apache-2.0" license.workspace = true
edition = "2021" edition.workspace = true
[package.metadata.docs.rs] [package.metadata.docs.rs]
rustdoc-args = ["--cfg", "docsrs"] rustdoc-args = ["--cfg", "docsrs"]
all-features = true all-features = true
[package.metadata.cargo_check_external_types]
allowed_external_types = [
"actix_http::*",
"actix_multipart_derive::*",
"actix_utils::*",
"actix_web::*",
"bytes::*",
"futures_core::*",
"mime::*",
"serde_json::*",
"serde_plain::*",
"serde::*",
"tempfile::*",
]
[features] [features]
default = ["tempfile", "derive"] default = ["tempfile", "derive"]
derive = ["actix-multipart-derive"] derive = ["actix-multipart-derive"]
tempfile = ["dep:tempfile", "tokio/fs"] tempfile = ["dep:tempfile", "tokio/fs"]
[dependencies] [dependencies]
actix-multipart-derive = { version = "=0.6.1", optional = true } actix-multipart-derive = { version = "=0.7.0", optional = true }
actix-utils = "3" actix-utils = "3"
actix-web = { version = "4", default-features = false } actix-web = { version = "4", default-features = false }
bytes = "1"
derive_more = "0.99.5" derive_more = "0.99.5"
futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] } futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
futures-util = { version = "0.3.17", default-features = false, features = ["alloc"] } futures-util = { version = "0.3.17", default-features = false, features = ["alloc"] }
@ -35,6 +50,7 @@ local-waker = "0.1"
log = "0.4" log = "0.4"
memchr = "2.5" memchr = "2.5"
mime = "0.3" mime = "0.3"
rand = "0.8"
serde = "1" serde = "1"
serde_json = "1" serde_json = "1"
serde_plain = "1" serde_plain = "1"
@ -46,7 +62,15 @@ actix-http = "3"
actix-multipart-rfc7578 = "0.10" actix-multipart-rfc7578 = "0.10"
actix-rt = "2.2" actix-rt = "2.2"
actix-test = "0.1" actix-test = "0.1"
actix-web = "4"
assert_matches = "1"
awc = "3" awc = "3"
env_logger = "0.11"
futures-util = { version = "0.3.17", default-features = false, features = ["alloc"] } futures-util = { version = "0.3.17", default-features = false, features = ["alloc"] }
futures-test = "0.3"
multer = "3"
tokio = { version = "1.24.2", features = ["sync"] } tokio = { version = "1.24.2", features = ["sync"] }
tokio-stream = "0.1" tokio-stream = "0.1"
[lints]
workspace = true

View file

@ -1,17 +1,74 @@
# actix-multipart # `actix-multipart`
> Multipart form support for Actix Web. <!-- prettier-ignore-start -->
[![crates.io](https://img.shields.io/crates/v/actix-multipart?label=latest)](https://crates.io/crates/actix-multipart) [![crates.io](https://img.shields.io/crates/v/actix-multipart?label=latest)](https://crates.io/crates/actix-multipart)
[![Documentation](https://docs.rs/actix-multipart/badge.svg?version=0.6.1)](https://docs.rs/actix-multipart/0.6.1) [![Documentation](https://docs.rs/actix-multipart/badge.svg?version=0.7.2)](https://docs.rs/actix-multipart/0.7.2)
![Version](https://img.shields.io/badge/rustc-1.68+-ab6000.svg) ![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-multipart.svg) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-multipart.svg)
<br /> <br />
[![dependency status](https://deps.rs/crate/actix-multipart/0.6.1/status.svg)](https://deps.rs/crate/actix-multipart/0.6.1) [![dependency status](https://deps.rs/crate/actix-multipart/0.7.2/status.svg)](https://deps.rs/crate/actix-multipart/0.7.2)
[![Download](https://img.shields.io/crates/d/actix-multipart.svg)](https://crates.io/crates/actix-multipart) [![Download](https://img.shields.io/crates/d/actix-multipart.svg)](https://crates.io/crates/actix-multipart)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
## Documentation & Resources <!-- prettier-ignore-end -->
- [API Documentation](https://docs.rs/actix-multipart) <!-- cargo-rdme start -->
- Minimum Supported Rust Version (MSRV): 1.68
Multipart request & form support for Actix Web.
The [`Multipart`] extractor aims to support all kinds of `multipart/*` requests, including `multipart/form-data`, `multipart/related`, and `multipart/mixed`. It is a lower-level extractor that reads [multipart fields](Field) in the order they are sent by the client.
Due to additional requirements for `multipart/form-data` requests, the higher-level [`MultipartForm`] extractor and derive macro only support this media type.
## Examples
```rust
use actix_web::{post, App, HttpServer, Responder};
use actix_multipart::form::{json::Json as MpJson, tempfile::TempFile, MultipartForm};
use serde::Deserialize;
#[derive(Debug, Deserialize)]
struct Metadata {
name: String,
}
#[derive(Debug, MultipartForm)]
struct UploadForm {
#[multipart(limit = "100MB")]
file: TempFile,
json: MpJson<Metadata>,
}
#[post("/videos")]
pub async fn post_video(MultipartForm(form): MultipartForm<UploadForm>) -> impl Responder {
format!(
"Uploaded file {}, with size: {}",
form.json.name, form.file.size
)
}
#[actix_web::main]
async fn main() -> std::io::Result<()> {
HttpServer::new(move || App::new().service(post_video))
.bind(("127.0.0.1", 8080))?
.run()
.await
}
```
cURL request:
```sh
curl -v --request POST \
--url http://localhost:8080/videos \
-F 'json={"name": "Cargo.lock"};type=application/json' \
-F file=@./Cargo.lock
```
[`MultipartForm`]: struct@form::MultipartForm
<!-- cargo-rdme end -->
[More available in the examples repo &rarr;](https://github.com/actix/examples/tree/master/forms/multipart)

View file

@ -0,0 +1,36 @@
use actix_multipart::form::{json::Json as MpJson, tempfile::TempFile, MultipartForm};
use actix_web::{middleware::Logger, post, App, HttpServer, Responder};
use serde::Deserialize;
#[derive(Debug, Deserialize)]
struct Metadata {
name: String,
}
#[derive(Debug, MultipartForm)]
struct UploadForm {
#[multipart(limit = "100MB")]
file: TempFile,
json: MpJson<Metadata>,
}
#[post("/videos")]
async fn post_video(MultipartForm(form): MultipartForm<UploadForm>) -> impl Responder {
format!(
"Uploaded file {}, with size: {}\ntemporary file ({}) was deleted\n",
form.json.name,
form.file.size,
form.file.file.path().display(),
)
}
#[actix_web::main]
async fn main() -> std::io::Result<()> {
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
HttpServer::new(move || App::new().service(post_video).wrap(Logger::default()))
.workers(2)
.bind(("127.0.0.1", 8080))?
.run()
.await
}

View file

@ -10,78 +10,96 @@ use derive_more::{Display, Error, From};
/// A set of errors that can occur during parsing multipart streams. /// A set of errors that can occur during parsing multipart streams.
#[derive(Debug, Display, From, Error)] #[derive(Debug, Display, From, Error)]
#[non_exhaustive] #[non_exhaustive]
pub enum MultipartError { pub enum Error {
/// Content-Disposition header is not found or is not equal to "form-data". /// Could not find Content-Type header.
#[display(fmt = "Could not find Content-Type header")]
ContentTypeMissing,
/// Could not parse Content-Type header.
#[display(fmt = "Could not parse Content-Type header")]
ContentTypeParse,
/// Parsed Content-Type did not have "multipart" top-level media type.
/// ///
/// According to [RFC 7578 §4.2](https://datatracker.ietf.org/doc/html/rfc7578#section-4.2) a /// Also raised when extracting a [`MultipartForm`] from a request that does not have the
/// Content-Disposition header must always be present and equal to "form-data". /// "multipart/form-data" media type.
#[display(fmt = "No Content-Disposition `form-data` header")] ///
NoContentDisposition, /// [`MultipartForm`]: struct@crate::form::MultipartForm
#[display(fmt = "Parsed Content-Type did not have "multipart" top-level media type")]
ContentTypeIncompatible,
/// Content-Type header is not found /// Multipart boundary is not found.
#[display(fmt = "No Content-Type header found")]
NoContentType,
/// Can not parse Content-Type header
#[display(fmt = "Can not parse Content-Type header")]
ParseContentType,
/// Multipart boundary is not found
#[display(fmt = "Multipart boundary is not found")] #[display(fmt = "Multipart boundary is not found")]
Boundary, BoundaryMissing,
/// Nested multipart is not supported /// Content-Disposition header was not found or not of disposition type "form-data" when parsing
/// a "form-data" field.
///
/// As per [RFC 7578 §4.2], a "multipart/form-data" field's Content-Disposition header must
/// always be present and have a disposition type of "form-data".
///
/// [RFC 7578 §4.2]: https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
#[display(fmt = "Content-Disposition header was not found when parsing a \"form-data\" field")]
ContentDispositionMissing,
/// Content-Disposition name parameter was not found when parsing a "form-data" field.
///
/// As per [RFC 7578 §4.2], a "multipart/form-data" field's Content-Disposition header must
/// always include a "name" parameter.
///
/// [RFC 7578 §4.2]: https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
#[display(fmt = "Content-Disposition header was not found when parsing a \"form-data\" field")]
ContentDispositionNameMissing,
/// Nested multipart is not supported.
#[display(fmt = "Nested multipart is not supported")] #[display(fmt = "Nested multipart is not supported")]
Nested, Nested,
/// Multipart stream is incomplete /// Multipart stream is incomplete.
#[display(fmt = "Multipart stream is incomplete")] #[display(fmt = "Multipart stream is incomplete")]
Incomplete, Incomplete,
/// Error during field parsing /// Field parsing failed.
#[display(fmt = "{}", _0)] #[display(fmt = "Error during field parsing")]
Parse(ParseError), Parse(ParseError),
/// Payload error /// HTTP payload error.
#[display(fmt = "{}", _0)] #[display(fmt = "Payload error")]
Payload(PayloadError), Payload(PayloadError),
/// Not consumed /// Stream is not consumed.
#[display(fmt = "Multipart stream is not consumed")] #[display(fmt = "Stream is not consumed")]
NotConsumed, NotConsumed,
/// An error from a field handler in a form /// Form field handler raised error.
#[display( #[display(fmt = "An error occurred processing field: {name}")]
fmt = "An error occurred processing field `{}`: {}",
field_name,
source
)]
Field { Field {
field_name: String, name: String,
source: actix_web::Error, source: actix_web::Error,
}, },
/// Duplicate field /// Duplicate field found (for structure that opted-in to denying duplicate fields).
#[display(fmt = "Duplicate field found for: `{}`", _0)] #[display(fmt = "Duplicate field found: {_0}")]
#[from(ignore)] #[from(ignore)]
DuplicateField(#[error(not(source))] String), DuplicateField(#[error(not(source))] String),
/// Missing field /// Required field is missing.
#[display(fmt = "Field with name `{}` is required", _0)] #[display(fmt = "Required field is missing: {_0}")]
#[from(ignore)] #[from(ignore)]
MissingField(#[error(not(source))] String), MissingField(#[error(not(source))] String),
/// Unknown field /// Unknown field (for structure that opted-in to denying unknown fields).
#[display(fmt = "Unsupported field `{}`", _0)] #[display(fmt = "Unknown field: {_0}")]
#[from(ignore)] #[from(ignore)]
UnsupportedField(#[error(not(source))] String), UnknownField(#[error(not(source))] String),
} }
/// Return `BadRequest` for `MultipartError` /// Return `BadRequest` for `MultipartError`.
impl ResponseError for MultipartError { impl ResponseError for Error {
fn status_code(&self) -> StatusCode { fn status_code(&self) -> StatusCode {
match &self { match &self {
MultipartError::Field { source, .. } => source.as_response_error().status_code(), Error::Field { source, .. } => source.as_response_error().status_code(),
Error::ContentTypeIncompatible => StatusCode::UNSUPPORTED_MEDIA_TYPE,
_ => StatusCode::BAD_REQUEST, _ => StatusCode::BAD_REQUEST,
} }
} }
@ -93,7 +111,7 @@ mod tests {
#[test] #[test]
fn test_multipart_error() { fn test_multipart_error() {
let resp = MultipartError::Boundary.error_response(); let resp = Error::BoundaryMissing.error_response();
assert_eq!(resp.status(), StatusCode::BAD_REQUEST); assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
} }
} }
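A minimal external-crate sketch of the status-code behavior shown above; it relies only on the public `MultipartError` re-export and the `ResponseError` impl.

```rust
use actix_multipart::MultipartError;
use actix_web::{http::StatusCode, ResponseError as _};

fn main() {
    // The new incompatible-media-type variant maps to 415; other parse errors remain 400.
    assert_eq!(
        MultipartError::ContentTypeIncompatible.status_code(),
        StatusCode::UNSUPPORTED_MEDIA_TYPE
    );
    assert_eq!(
        MultipartError::BoundaryMissing.status_code(),
        StatusCode::BAD_REQUEST
    );
}
```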

View file

@ -1,21 +1,20 @@
//! Multipart payload support
use actix_utils::future::{ready, Ready}; use actix_utils::future::{ready, Ready};
use actix_web::{dev::Payload, Error, FromRequest, HttpRequest}; use actix_web::{dev::Payload, Error, FromRequest, HttpRequest};
use crate::server::Multipart; use crate::multipart::Multipart;
/// Get request's payload as multipart stream. /// Extract request's payload as multipart stream.
/// ///
/// Content-type: multipart/form-data; /// Content-type: multipart/*;
/// ///
/// # Examples /// # Examples
///
/// ``` /// ```
/// use actix_web::{web, HttpResponse, Error}; /// use actix_web::{web, HttpResponse};
/// use actix_multipart::Multipart; /// use actix_multipart::Multipart;
/// use futures_util::StreamExt as _; /// use futures_util::StreamExt as _;
/// ///
/// async fn index(mut payload: Multipart) -> Result<HttpResponse, Error> { /// async fn index(mut payload: Multipart) -> actix_web::Result<HttpResponse> {
/// // iterate over multipart stream /// // iterate over multipart stream
/// while let Some(item) = payload.next().await { /// while let Some(item) = payload.next().await {
/// let mut field = item?; /// let mut field = item?;
@ -26,7 +25,7 @@ use crate::server::Multipart;
/// } /// }
/// } /// }
/// ///
/// Ok(HttpResponse::Ok().into()) /// Ok(HttpResponse::Ok().finish())
/// } /// }
/// ``` /// ```
impl FromRequest for Multipart { impl FromRequest for Multipart {
@ -35,9 +34,6 @@ impl FromRequest for Multipart {
#[inline] #[inline]
fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future { fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
ready(Ok(match Multipart::boundary(req.headers()) { ready(Ok(Multipart::from_req(req, payload)))
Ok(boundary) => Multipart::from_boundary(boundary, payload.take()),
Err(err) => Multipart::from_error(err),
}))
} }
} }

View file

@ -0,0 +1,501 @@
use std::{
cell::RefCell,
cmp, fmt,
future::poll_fn,
mem,
pin::Pin,
rc::Rc,
task::{ready, Context, Poll},
};
use actix_web::{
error::PayloadError,
http::header::{self, ContentDisposition, HeaderMap},
web::{Bytes, BytesMut},
};
use derive_more::{Display, Error};
use futures_core::Stream;
use mime::Mime;
use crate::{
error::Error,
payload::{PayloadBuffer, PayloadRef},
safety::Safety,
};
/// Error type returned from [`Field::bytes()`] when field data is larger than limit.
#[derive(Debug, Display, Error)]
#[display(fmt = "size limit exceeded while collecting field data")]
#[non_exhaustive]
pub struct LimitExceeded;
/// A single field in a multipart stream.
pub struct Field {
/// Field's Content-Type.
content_type: Option<Mime>,
/// Field's Content-Disposition.
content_disposition: Option<ContentDisposition>,
/// Form field name.
///
/// Non-optional storage for the form field name, used to avoid unwraps in the `form` module.
/// Will be an empty string in non-form contexts.
///
// INVARIANT: always non-empty when request content-type is multipart/form-data.
pub(crate) form_field_name: String,
/// Field's header map.
headers: HeaderMap,
safety: Safety,
inner: Rc<RefCell<InnerField>>,
}
impl Field {
pub(crate) fn new(
content_type: Option<Mime>,
content_disposition: Option<ContentDisposition>,
form_field_name: Option<String>,
headers: HeaderMap,
safety: Safety,
inner: Rc<RefCell<InnerField>>,
) -> Self {
Field {
content_type,
content_disposition,
form_field_name: form_field_name.unwrap_or_default(),
headers,
inner,
safety,
}
}
/// Returns a reference to the field's header map.
pub fn headers(&self) -> &HeaderMap {
&self.headers
}
/// Returns a reference to the field's content (mime) type, if it is supplied by the client.
///
/// According to [RFC 7578](https://www.rfc-editor.org/rfc/rfc7578#section-4.4), if it is not
/// present, it should default to "text/plain". Note it is the responsibility of the client to
/// provide the appropriate content type, there is no attempt to validate this by the server.
pub fn content_type(&self) -> Option<&Mime> {
self.content_type.as_ref()
}
/// Returns this field's parsed Content-Disposition header, if set.
///
/// # Validation
///
/// Per [RFC 7578 §4.2], the parts of a multipart/form-data payload MUST contain a
/// Content-Disposition header field where the disposition type is `form-data` and MUST also
/// contain an additional parameter of `name` with its value being the original field name from
/// the form. This requirement is enforced during extraction for multipart/form-data requests,
/// but not other kinds of multipart requests (such as multipart/related).
///
/// As such, it is safe to `.unwrap()` the return value of `.content_disposition()` once you have
/// verified that the request is a `multipart/form-data` request.
///
/// The [`name()`](Self::name) method is also provided as a convenience for obtaining the
/// aforementioned name parameter.
///
/// [RFC 7578 §4.2]: https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
pub fn content_disposition(&self) -> Option<&ContentDisposition> {
self.content_disposition.as_ref()
}
/// Returns the field's name, if set.
///
/// See [`content_disposition()`](Self::content_disposition) regarding guarantees on presence of
/// the "name" field.
pub fn name(&self) -> Option<&str> {
self.content_disposition()?.get_name()
}
/// Collects the raw field data, up to `limit` bytes.
///
/// # Errors
///
/// Any errors produced by the data stream are returned as `Ok(Err(Error))` immediately.
///
/// If the buffered data size would exceed `limit`, an `Err(LimitExceeded)` is returned. Note
/// that, in this case, the full data stream is exhausted before returning the error so that
/// subsequent fields can still be read. To better defend against malicious/infinite requests,
/// it is advisable to also put a timeout on this call.
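///
/// # Examples
///
/// A minimal sketch (illustration only) of collecting a field while guarding against
/// oversized uploads; assumes `field` was obtained from a `Multipart` stream.
///
/// ```no_run
/// # use actix_multipart::{Field, MultipartError};
/// # async fn handle(mut field: Field) -> Result<(), MultipartError> {
/// match field.bytes(64 * 1024).await {
///     // field fit within the 64 kB limit
///     Ok(Ok(bytes)) => println!("collected {} bytes", bytes.len()),
///     // stream-level error while reading the field
///     Ok(Err(err)) => return Err(err),
///     // field exceeded the limit; its stream was drained so later fields can still be read
///     Err(_limit_exceeded) => println!("field too large, skipped"),
/// }
/// # Ok(())
/// # }
/// ```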
pub async fn bytes(&mut self, limit: usize) -> Result<Result<Bytes, Error>, LimitExceeded> {
/// Sensible default (2kB) for initial, bounded allocation when collecting body bytes.
const INITIAL_ALLOC_BYTES: usize = 2 * 1024;
let mut exceeded_limit = false;
let mut buf = BytesMut::with_capacity(INITIAL_ALLOC_BYTES);
let mut field = Pin::new(self);
match poll_fn(|cx| loop {
match ready!(field.as_mut().poll_next(cx)) {
// if already over limit, discard chunk to advance multipart request
Some(Ok(_chunk)) if exceeded_limit => {}
// if limit is exceeded set flag to true and continue
Some(Ok(chunk)) if buf.len() + chunk.len() > limit => {
exceeded_limit = true;
// eagerly de-allocate field data buffer
let _ = mem::take(&mut buf);
}
Some(Ok(chunk)) => buf.extend_from_slice(&chunk),
None => return Poll::Ready(Ok(())),
Some(Err(err)) => return Poll::Ready(Err(err)),
}
})
.await
{
// propagate error returned from body poll
Err(err) => Ok(Err(err)),
// limit was exceeded while reading body
Ok(()) if exceeded_limit => Err(LimitExceeded),
// otherwise return body buffer
Ok(()) => Ok(Ok(buf.freeze())),
}
}
}
impl Stream for Field {
type Item = Result<Bytes, Error>;
fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
let this = self.get_mut();
let mut inner = this.inner.borrow_mut();
if let Some(mut buffer) = inner
.payload
.as_ref()
.expect("Field should not be polled after completion")
.get_mut(&this.safety)
{
// check safety and poll read payload to buffer.
buffer.poll_stream(cx)?;
} else if !this.safety.is_clean() {
// safety violation
return Poll::Ready(Some(Err(Error::NotConsumed)));
} else {
return Poll::Pending;
}
inner.poll(&this.safety)
}
}
impl fmt::Debug for Field {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if let Some(ct) = &self.content_type {
writeln!(f, "\nField: {}", ct)?;
} else {
writeln!(f, "\nField:")?;
}
writeln!(f, " boundary: {}", self.inner.borrow().boundary)?;
writeln!(f, " headers:")?;
for (key, val) in self.headers.iter() {
writeln!(f, " {:?}: {:?}", key, val)?;
}
Ok(())
}
}
pub(crate) struct InnerField {
/// Payload is initialized as Some and is `take`n when the field stream finishes.
payload: Option<PayloadRef>,
/// Field boundary (without "--" prefix).
boundary: String,
/// True if request payload has been exhausted.
eof: bool,
/// Field data's stated size according to its `Content-Length` header.
length: Option<u64>,
}
impl InnerField {
pub(crate) fn new_in_rc(
payload: PayloadRef,
boundary: String,
headers: &HeaderMap,
) -> Result<Rc<RefCell<InnerField>>, PayloadError> {
Self::new(payload, boundary, headers).map(|this| Rc::new(RefCell::new(this)))
}
pub(crate) fn new(
payload: PayloadRef,
boundary: String,
headers: &HeaderMap,
) -> Result<InnerField, PayloadError> {
let len = if let Some(len) = headers.get(&header::CONTENT_LENGTH) {
match len.to_str().ok().and_then(|len| len.parse::<u64>().ok()) {
Some(len) => Some(len),
None => return Err(PayloadError::Incomplete(None)),
}
} else {
None
};
Ok(InnerField {
boundary,
payload: Some(payload),
eof: false,
length: len,
})
}
/// Reads a content chunk of the body part, up to the specified size.
///
/// The body part must have a `Content-Length` header with a valid value.
pub(crate) fn read_len(
payload: &mut PayloadBuffer,
size: &mut u64,
) -> Poll<Option<Result<Bytes, Error>>> {
if *size == 0 {
Poll::Ready(None)
} else {
match payload.read_max(*size)? {
Some(mut chunk) => {
let len = cmp::min(chunk.len() as u64, *size);
*size -= len;
let ch = chunk.split_to(len as usize);
if !chunk.is_empty() {
payload.unprocessed(chunk);
}
Poll::Ready(Some(Ok(ch)))
}
None => {
if payload.eof && (*size != 0) {
Poll::Ready(Some(Err(Error::Incomplete)))
} else {
Poll::Pending
}
}
}
}
}
/// Reads a content chunk of a body part with unknown length.
///
/// The body part does not need to have a `Content-Length` header.
pub(crate) fn read_stream(
payload: &mut PayloadBuffer,
boundary: &str,
) -> Poll<Option<Result<Bytes, Error>>> {
let mut pos = 0;
let len = payload.buf.len();
if len == 0 {
return if payload.eof {
Poll::Ready(Some(Err(Error::Incomplete)))
} else {
Poll::Pending
};
}
// check boundary
if len > 4 && payload.buf[0] == b'\r' {
let b_len = if payload.buf.starts_with(b"\r\n") && &payload.buf[2..4] == b"--" {
Some(4)
} else if &payload.buf[1..3] == b"--" {
Some(3)
} else {
None
};
if let Some(b_len) = b_len {
let b_size = boundary.len() + b_len;
if len < b_size {
return Poll::Pending;
} else if &payload.buf[b_len..b_size] == boundary.as_bytes() {
// found boundary
return Poll::Ready(None);
}
}
}
loop {
return if let Some(idx) = memchr::memmem::find(&payload.buf[pos..], b"\r") {
let cur = pos + idx;
// check if we have enough data for boundary detection
if cur + 4 > len {
if cur > 0 {
Poll::Ready(Some(Ok(payload.buf.split_to(cur).freeze())))
} else {
Poll::Pending
}
} else {
// check boundary
if (&payload.buf[cur..cur + 2] == b"\r\n"
&& &payload.buf[cur + 2..cur + 4] == b"--")
|| (&payload.buf[cur..=cur] == b"\r"
&& &payload.buf[cur + 1..cur + 3] == b"--")
{
if cur != 0 {
// return buffer
Poll::Ready(Some(Ok(payload.buf.split_to(cur).freeze())))
} else {
pos = cur + 1;
continue;
}
} else {
// not boundary
pos = cur + 1;
continue;
}
}
} else {
Poll::Ready(Some(Ok(payload.buf.split().freeze())))
};
}
}
pub(crate) fn poll(&mut self, safety: &Safety) -> Poll<Option<Result<Bytes, Error>>> {
if self.payload.is_none() {
return Poll::Ready(None);
}
let Some(mut payload) = self
.payload
.as_ref()
.expect("Field should not be polled after completion")
.get_mut(safety)
else {
return Poll::Pending;
};
if !self.eof {
let res = if let Some(ref mut len) = self.length {
Self::read_len(&mut payload, len)
} else {
Self::read_stream(&mut payload, &self.boundary)
};
match ready!(res) {
Some(Ok(bytes)) => return Poll::Ready(Some(Ok(bytes))),
Some(Err(err)) => return Poll::Ready(Some(Err(err))),
None => self.eof = true,
}
}
let result = match payload.readline() {
Ok(None) => Poll::Pending,
Ok(Some(line)) => {
if line.as_ref() != b"\r\n" {
log::warn!("multipart field did not read all the data or it is malformed");
}
Poll::Ready(None)
}
Err(err) => Poll::Ready(Some(Err(err))),
};
drop(payload);
if let Poll::Ready(None) = result {
// drop payload buffer and make future un-poll-able
let _ = self.payload.take();
}
result
}
}
#[cfg(test)]
mod tests {
use futures_util::{stream, StreamExt as _};
use super::*;
use crate::Multipart;
// TODO: use test utility when multi-file support is introduced
fn create_double_request_with_header() -> (Bytes, HeaderMap) {
let bytes = Bytes::from(
"testasdadsad\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
Content-Disposition: form-data; name=\"file\"; filename=\"fn.txt\"\r\n\
Content-Type: text/plain; charset=utf-8\r\n\
\r\n\
one+one+one\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
Content-Disposition: form-data; name=\"file\"; filename=\"fn.txt\"\r\n\
Content-Type: text/plain; charset=utf-8\r\n\
\r\n\
two+two+two\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0--\r\n",
);
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static(
"multipart/mixed; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
),
);
(bytes, headers)
}
#[actix_rt::test]
async fn bytes_unlimited() {
let (body, headers) = create_double_request_with_header();
let mut multipart = Multipart::new(&headers, stream::iter([Ok(body)]));
let field = multipart
.next()
.await
.expect("multipart should have two fields")
.expect("multipart body should be well formatted")
.bytes(usize::MAX)
.await
.expect("field data should not be size limited")
.expect("reading field data should not error");
assert_eq!(field, "one+one+one");
let field = multipart
.next()
.await
.expect("multipart should have two fields")
.expect("multipart body should be well formatted")
.bytes(usize::MAX)
.await
.expect("field data should not be size limited")
.expect("reading field data should not error");
assert_eq!(field, "two+two+two");
}
#[actix_rt::test]
async fn bytes_limited() {
let (body, headers) = create_double_request_with_header();
let mut multipart = Multipart::new(&headers, stream::iter([Ok(body)]));
multipart
.next()
.await
.expect("multipart should have two fields")
.expect("multipart body should be well formatted")
.bytes(8) // smaller than data size
.await
.expect_err("field data should be size limited");
// next field still readable
let field = multipart
.next()
.await
.expect("multipart should have two fields")
.expect("multipart body should be well formatted")
.bytes(usize::MAX)
.await
.expect("field data should not be size limited")
.expect("reading field data should not error");
assert_eq!(field, "two+two+two");
}
}

View file

@@ -1,7 +1,6 @@
//! Reads a field into memory.
-use actix_web::HttpRequest;
-use bytes::BytesMut;
+use actix_web::{web::BytesMut, HttpRequest};
use futures_core::future::LocalBoxFuture;
use futures_util::TryStreamExt as _;
use mime::Mime;
@@ -15,7 +14,7 @@ use crate::{
#[derive(Debug)]
pub struct Bytes {
/// The data.
-pub data: bytes::Bytes,
+pub data: actix_web::web::Bytes,
/// The value of the `Content-Type` header.
pub content_type: Option<Mime>,
@@ -41,8 +40,9 @@ impl<'t> FieldReader<'t> for Bytes {
content_type: field.content_type().map(ToOwned::to_owned),
file_name: field
.content_disposition()
+.expect("multipart form fields should have a content-disposition header")
.get_filename()
-.map(str::to_owned),
+.map(ToOwned::to_owned),
})
})
}

View file

@ -32,7 +32,6 @@ where
fn read_field(req: &'t HttpRequest, field: Field, limits: &'t mut Limits) -> Self::Future { fn read_field(req: &'t HttpRequest, field: Field, limits: &'t mut Limits) -> Self::Future {
Box::pin(async move { Box::pin(async move {
let config = JsonConfig::from_req(req); let config = JsonConfig::from_req(req);
let field_name = field.name().to_owned();
if config.validate_content_type { if config.validate_content_type {
let valid = if let Some(mime) = field.content_type() { let valid = if let Some(mime) = field.content_type() {
@ -43,17 +42,19 @@ where
if !valid { if !valid {
return Err(MultipartError::Field { return Err(MultipartError::Field {
field_name, name: field.form_field_name,
source: config.map_error(req, JsonFieldError::ContentType), source: config.map_error(req, JsonFieldError::ContentType),
}); });
} }
} }
let form_field_name = field.form_field_name.clone();
let bytes = Bytes::read_field(req, field, limits).await?; let bytes = Bytes::read_field(req, field, limits).await?;
Ok(Json(serde_json::from_slice(bytes.data.as_ref()).map_err( Ok(Json(serde_json::from_slice(bytes.data.as_ref()).map_err(
|err| MultipartError::Field { |err| MultipartError::Field {
field_name, name: form_field_name,
source: config.map_error(req, JsonFieldError::Deserialize(err)), source: config.map_error(req, JsonFieldError::Deserialize(err)),
}, },
)?)) )?))
@ -131,14 +132,12 @@ impl Default for JsonConfig {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use std::{collections::HashMap, io::Cursor}; use std::collections::HashMap;
use actix_multipart_rfc7578::client::multipart; use actix_web::{http::StatusCode, web, web::Bytes, App, HttpResponse, Responder};
use actix_web::{http::StatusCode, web, App, HttpResponse, Responder};
use crate::form::{ use crate::form::{
json::{Json, JsonConfig}, json::{Json, JsonConfig},
tests::send_form,
MultipartForm, MultipartForm,
}; };
@ -155,6 +154,8 @@ mod tests {
HttpResponse::Ok().finish() HttpResponse::Ok().finish()
} }
const TEST_JSON: &str = r#"{"key1": "value1", "key2": "value2"}"#;
#[actix_rt::test] #[actix_rt::test]
async fn test_json_without_content_type() { async fn test_json_without_content_type() {
let srv = actix_test::start(|| { let srv = actix_test::start(|| {
@ -163,10 +164,16 @@ mod tests {
.app_data(JsonConfig::default().validate_content_type(false)) .app_data(JsonConfig::default().validate_content_type(false))
}); });
let mut form = multipart::Form::default(); let (body, headers) = crate::test::create_form_data_payload_and_headers(
form.add_text("json", "{\"key1\": \"value1\", \"key2\": \"value2\"}"); "json",
let response = send_form(&srv, form, "/").await; None,
assert_eq!(response.status(), StatusCode::OK); None,
Bytes::from_static(TEST_JSON.as_bytes()),
);
let mut req = srv.post("/");
*req.headers_mut() = headers;
let res = req.send_body(body).await.unwrap();
assert_eq!(res.status(), StatusCode::OK);
} }
#[actix_rt::test] #[actix_rt::test]
@ -178,17 +185,27 @@ mod tests {
}); });
// Deny because wrong content type // Deny because wrong content type
let bytes = Cursor::new("{\"key1\": \"value1\", \"key2\": \"value2\"}"); let (body, headers) = crate::test::create_form_data_payload_and_headers(
let mut form = multipart::Form::default(); "json",
form.add_reader_file_with_mime("json", bytes, "", mime::APPLICATION_OCTET_STREAM); None,
let response = send_form(&srv, form, "/").await; Some(mime::APPLICATION_OCTET_STREAM),
assert_eq!(response.status(), StatusCode::BAD_REQUEST); Bytes::from_static(TEST_JSON.as_bytes()),
);
let mut req = srv.post("/");
*req.headers_mut() = headers;
let res = req.send_body(body).await.unwrap();
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
// Allow because correct content type // Allow because correct content type
let bytes = Cursor::new("{\"key1\": \"value1\", \"key2\": \"value2\"}"); let (body, headers) = crate::test::create_form_data_payload_and_headers(
let mut form = multipart::Form::default(); "json",
form.add_reader_file_with_mime("json", bytes, "", mime::APPLICATION_JSON); None,
let response = send_form(&srv, form, "/").await; Some(mime::APPLICATION_JSON),
assert_eq!(response.status(), StatusCode::OK); Bytes::from_static(TEST_JSON.as_bytes()),
);
let mut req = srv.post("/");
*req.headers_mut() = headers;
let res = req.send_body(body).await.unwrap();
assert_eq!(res.status(), StatusCode::OK);
} }
} }

View file

@ -1,4 +1,4 @@
//! Process and extract typed data from a multipart stream. //! Extract and process typed data from fields of a `multipart/form-data` request.
use std::{ use std::{
any::Any, any::Any,
@ -33,6 +33,14 @@ pub trait FieldReader<'t>: Sized + Any {
type Future: Future<Output = Result<Self, MultipartError>>; type Future: Future<Output = Result<Self, MultipartError>>;
/// The form will call this function to handle the field. /// The form will call this function to handle the field.
///
/// # Panics
///
/// When reading the `field` payload using its `Stream` implementation, polling (manually or via
/// `next()`/`try_next()`) may panic after the payload is exhausted. If this is a problem for
/// your implementation of this method, you should [`fuse()`] the `Field` first.
///
/// [`fuse()`]: futures_util::stream::StreamExt::fuse()
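///
/// A defensive implementation might fuse the field first (illustrative sketch, not from the
/// original source):
///
/// ```ignore
/// use futures_util::{StreamExt as _, TryStreamExt as _};
///
/// // after fusing, polling past the end yields `None` instead of panicking
/// let mut field = field.fuse();
/// while let Some(chunk) = field.try_next().await? {
///     // process `chunk`
/// }
/// ```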
fn read_field(req: &'t HttpRequest, field: Field, limits: &'t mut Limits) -> Self::Future; fn read_field(req: &'t HttpRequest, field: Field, limits: &'t mut Limits) -> Self::Future;
} }
@ -72,13 +80,13 @@ where
state: &'t mut State, state: &'t mut State,
duplicate_field: DuplicateField, duplicate_field: DuplicateField,
) -> Self::Future { ) -> Self::Future {
if state.contains_key(field.name()) { if state.contains_key(&field.form_field_name) {
match duplicate_field { match duplicate_field {
DuplicateField::Ignore => return Box::pin(ready(Ok(()))), DuplicateField::Ignore => return Box::pin(ready(Ok(()))),
DuplicateField::Deny => { DuplicateField::Deny => {
return Box::pin(ready(Err(MultipartError::DuplicateField( return Box::pin(ready(Err(MultipartError::DuplicateField(
field.name().to_owned(), field.form_field_name,
)))) ))))
} }
@ -87,7 +95,7 @@ where
} }
Box::pin(async move { Box::pin(async move {
let field_name = field.name().to_owned(); let field_name = field.form_field_name.clone();
let t = T::read_field(req, field, limits).await?; let t = T::read_field(req, field, limits).await?;
state.insert(field_name, Box::new(t)); state.insert(field_name, Box::new(t));
Ok(()) Ok(())
@ -115,10 +123,8 @@ where
Box::pin(async move { Box::pin(async move {
// Note: Vec GroupReader always allows duplicates // Note: Vec GroupReader always allows duplicates
let field_name = field.name().to_owned();
let vec = state let vec = state
.entry(field_name) .entry(field.form_field_name.clone())
.or_insert_with(|| Box::<Vec<T>>::default()) .or_insert_with(|| Box::<Vec<T>>::default())
.downcast_mut::<Vec<T>>() .downcast_mut::<Vec<T>>()
.unwrap(); .unwrap();
@ -151,13 +157,13 @@ where
state: &'t mut State, state: &'t mut State,
duplicate_field: DuplicateField, duplicate_field: DuplicateField,
) -> Self::Future { ) -> Self::Future {
if state.contains_key(field.name()) { if state.contains_key(&field.form_field_name) {
match duplicate_field { match duplicate_field {
DuplicateField::Ignore => return Box::pin(ready(Ok(()))), DuplicateField::Ignore => return Box::pin(ready(Ok(()))),
DuplicateField::Deny => { DuplicateField::Deny => {
return Box::pin(ready(Err(MultipartError::DuplicateField( return Box::pin(ready(Err(MultipartError::DuplicateField(
field.name().to_owned(), field.form_field_name,
)))) ))))
} }
@ -166,7 +172,7 @@ where
} }
Box::pin(async move { Box::pin(async move {
let field_name = field.name().to_owned(); let field_name = field.form_field_name.clone();
let t = T::read_field(req, field, limits).await?; let t = T::read_field(req, field, limits).await?;
state.insert(field_name, Box::new(t)); state.insert(field_name, Box::new(t));
Ok(()) Ok(())
@ -273,6 +279,9 @@ impl Limits {
/// [`MultipartCollect`] trait. You should use the [`macro@MultipartForm`] macro to derive this /// [`MultipartCollect`] trait. You should use the [`macro@MultipartForm`] macro to derive this
/// for your struct. /// for your struct.
/// ///
/// Note that this extractor rejects requests with any other Content-Type such as `multipart/mixed`,
/// `multipart/related`, or non-multipart media types.
///
/// Add a [`MultipartFormConfig`] to your app data to configure extraction. /// Add a [`MultipartFormConfig`] to your app data to configure extraction.
#[derive(Deref, DerefMut)] #[derive(Deref, DerefMut)]
pub struct MultipartForm<T: MultipartCollect>(pub T); pub struct MultipartForm<T: MultipartCollect>(pub T);
@ -286,14 +295,24 @@ impl<T: MultipartCollect> MultipartForm<T> {
impl<T> FromRequest for MultipartForm<T> impl<T> FromRequest for MultipartForm<T>
where where
T: MultipartCollect, T: MultipartCollect + 'static,
{ {
type Error = Error; type Error = Error;
type Future = LocalBoxFuture<'static, Result<Self, Self::Error>>; type Future = LocalBoxFuture<'static, Result<Self, Self::Error>>;
#[inline] #[inline]
fn from_request(req: &HttpRequest, payload: &mut dev::Payload) -> Self::Future { fn from_request(req: &HttpRequest, payload: &mut dev::Payload) -> Self::Future {
let mut payload = Multipart::new(req.headers(), payload.take()); let mut multipart = Multipart::from_req(req, payload);
let content_type = match multipart.content_type_or_bail() {
Ok(content_type) => content_type,
Err(err) => return Box::pin(ready(Err(err.into()))),
};
if content_type.subtype() != mime::FORM_DATA {
// this extractor only supports multipart/form-data
return Box::pin(ready(Err(MultipartError::ContentTypeIncompatible.into())));
};
let config = MultipartFormConfig::from_req(req); let config = MultipartFormConfig::from_req(req);
let mut limits = Limits::new(config.total_limit, config.memory_limit); let mut limits = Limits::new(config.total_limit, config.memory_limit);
@ -305,21 +324,29 @@ where
Box::pin( Box::pin(
async move { async move {
let mut state = State::default(); let mut state = State::default();
// We need to ensure field limits are shared for all instances of this field name
// ensure limits are shared for all fields with this name
let mut field_limits = HashMap::<String, Option<usize>>::new(); let mut field_limits = HashMap::<String, Option<usize>>::new();
while let Some(field) = payload.try_next().await? { while let Some(field) = multipart.try_next().await? {
debug_assert!(
!field.form_field_name.is_empty(),
"multipart form fields should have names",
);
// Retrieve the limit for this field // Retrieve the limit for this field
let entry = field_limits let entry = field_limits
.entry(field.name().to_owned()) .entry(field.form_field_name.clone())
.or_insert_with(|| T::limit(field.name())); .or_insert_with(|| T::limit(&field.form_field_name));
limits.field_limit_remaining = entry.to_owned();
limits.field_limit_remaining.clone_from(entry);
T::handle_field(&req, field, &mut limits, &mut state).await?; T::handle_field(&req, field, &mut limits, &mut state).await?;
// Update the stored limit // Update the stored limit
*entry = limits.field_limit_remaining; *entry = limits.field_limit_remaining;
} }
let inner = T::from_state(state)?; let inner = T::from_state(state)?;
Ok(MultipartForm(inner)) Ok(MultipartForm(inner))
} }
@ -395,11 +422,20 @@ mod tests {
use actix_http::encoding::Decoder; use actix_http::encoding::Decoder;
use actix_multipart_rfc7578::client::multipart; use actix_multipart_rfc7578::client::multipart;
use actix_test::TestServer; use actix_test::TestServer;
use actix_web::{dev::Payload, http::StatusCode, web, App, HttpResponse, Responder}; use actix_web::{
dev::Payload, http::StatusCode, web, App, HttpRequest, HttpResponse, Resource, Responder,
};
use awc::{Client, ClientResponse}; use awc::{Client, ClientResponse};
use futures_core::future::LocalBoxFuture;
use futures_util::TryStreamExt as _;
use super::MultipartForm; use super::MultipartForm;
use crate::form::{bytes::Bytes, tempfile::TempFile, text::Text, MultipartFormConfig}; use crate::{
form::{
bytes::Bytes, tempfile::TempFile, text::Text, FieldReader, Limits, MultipartFormConfig,
},
Field, MultipartError,
};
pub async fn send_form( pub async fn send_form(
srv: &TestServer, srv: &TestServer,
@ -733,4 +769,84 @@ mod tests {
let response = send_form(&srv, form, "/").await; let response = send_form(&srv, form, "/").await;
assert_eq!(response.status(), StatusCode::BAD_REQUEST); assert_eq!(response.status(), StatusCode::BAD_REQUEST);
} }
#[actix_rt::test]
async fn non_multipart_form_data() {
#[derive(MultipartForm)]
struct TestNonMultipartFormData {
#[allow(unused)]
#[multipart(limit = "30B")]
foo: Text<String>,
}
async fn non_multipart_form_data_route(
_form: MultipartForm<TestNonMultipartFormData>,
) -> String {
unreachable!("request is sent with multipart/mixed");
}
let srv = actix_test::start(|| {
App::new().route("/", web::post().to(non_multipart_form_data_route))
});
let mut form = multipart::Form::default();
form.add_text("foo", "foo");
// mangle content-type, keeping the boundary
let ct = form.content_type().replacen("/form-data", "/mixed", 1);
let res = Client::default()
.post(srv.url("/"))
.content_type(ct)
.send_body(multipart::Body::from(form))
.await
.unwrap();
assert_eq!(res.status(), StatusCode::UNSUPPORTED_MEDIA_TYPE);
}
#[should_panic(expected = "called `Result::unwrap()` on an `Err` value: Connect(Disconnected)")]
#[actix_web::test]
async fn field_try_next_panic() {
#[derive(Debug)]
struct NullSink;
impl<'t> FieldReader<'t> for NullSink {
type Future = LocalBoxFuture<'t, Result<Self, MultipartError>>;
fn read_field(
_: &'t HttpRequest,
mut field: Field,
_limits: &'t mut Limits,
) -> Self::Future {
Box::pin(async move {
// exhaust field stream
while let Some(_chunk) = field.try_next().await? {}
// poll again, crash
let _post = field.try_next().await;
Ok(Self)
})
}
}
#[allow(dead_code)]
#[derive(MultipartForm)]
struct NullSinkForm {
foo: NullSink,
}
async fn null_sink(_form: MultipartForm<NullSinkForm>) -> impl Responder {
"unreachable"
}
let srv = actix_test::start(|| App::new().service(Resource::new("/").post(null_sink)));
let mut form = multipart::Form::default();
form.add_text("foo", "data is not important to this test");
// panics with Err(Connect(Disconnected)) due to form NullSink panic
let _res = send_form(&srv, form, "/").await;
}
} }

View file

@ -42,38 +42,36 @@ impl<'t> FieldReader<'t> for TempFile {
fn read_field(req: &'t HttpRequest, mut field: Field, limits: &'t mut Limits) -> Self::Future { fn read_field(req: &'t HttpRequest, mut field: Field, limits: &'t mut Limits) -> Self::Future {
Box::pin(async move { Box::pin(async move {
let config = TempFileConfig::from_req(req); let config = TempFileConfig::from_req(req);
let field_name = field.name().to_owned();
let mut size = 0; let mut size = 0;
let file = config let file = config.create_tempfile().map_err(|err| {
.create_tempfile() config.map_error(req, &field.form_field_name, TempFileError::FileIo(err))
.map_err(|err| config.map_error(req, &field_name, TempFileError::FileIo(err)))?; })?;
let mut file_async = let mut file_async = tokio::fs::File::from_std(file.reopen().map_err(|err| {
tokio::fs::File::from_std(file.reopen().map_err(|err| { config.map_error(req, &field.form_field_name, TempFileError::FileIo(err))
config.map_error(req, &field_name, TempFileError::FileIo(err)) })?);
})?);
while let Some(chunk) = field.try_next().await? { while let Some(chunk) = field.try_next().await? {
limits.try_consume_limits(chunk.len(), false)?; limits.try_consume_limits(chunk.len(), false)?;
size += chunk.len(); size += chunk.len();
file_async.write_all(chunk.as_ref()).await.map_err(|err| { file_async.write_all(chunk.as_ref()).await.map_err(|err| {
config.map_error(req, &field_name, TempFileError::FileIo(err)) config.map_error(req, &field.form_field_name, TempFileError::FileIo(err))
})?; })?;
} }
file_async file_async.flush().await.map_err(|err| {
.flush() config.map_error(req, &field.form_field_name, TempFileError::FileIo(err))
.await })?;
.map_err(|err| config.map_error(req, &field_name, TempFileError::FileIo(err)))?;
Ok(TempFile { Ok(TempFile {
file, file,
content_type: field.content_type().map(ToOwned::to_owned), content_type: field.content_type().map(ToOwned::to_owned),
file_name: field file_name: field
.content_disposition() .content_disposition()
.expect("multipart form fields should have a content-disposition header")
.get_filename() .get_filename()
.map(str::to_owned), .map(ToOwned::to_owned),
size, size,
}) })
}) })
@ -137,7 +135,7 @@ impl TempFileConfig {
}; };
MultipartError::Field { MultipartError::Field {
field_name: field_name.to_owned(), name: field_name.to_owned(),
source, source,
} }
} }

View file

@@ -36,7 +36,6 @@ where
fn read_field(req: &'t HttpRequest, field: Field, limits: &'t mut Limits) -> Self::Future {
Box::pin(async move {
let config = TextConfig::from_req(req);
-let field_name = field.name().to_owned();
if config.validate_content_type {
let valid = if let Some(mime) = field.content_type() {
@@ -49,22 +48,24 @@ where
if !valid {
return Err(MultipartError::Field {
-field_name,
+name: field.form_field_name,
source: config.map_error(req, TextError::ContentType),
});
}
}
+let form_field_name = field.form_field_name.clone();
let bytes = Bytes::read_field(req, field, limits).await?;
let text = str::from_utf8(&bytes.data).map_err(|err| MultipartError::Field {
-field_name: field_name.clone(),
+name: form_field_name.clone(),
source: config.map_error(req, TextError::Utf8Error(err)),
})?;
Ok(Text(serde_plain::from_str(text).map_err(|err| {
MultipartError::Field {
-field_name,
+name: form_field_name,
source: config.map_error(req, TextError::Deserialize(err)),
}
})?))

View file

@ -1,8 +1,61 @@
//! Multipart form support for Actix Web. //! Multipart request & form support for Actix Web.
//!
//! The [`Multipart`] extractor aims to support all kinds of `multipart/*` requests, including
//! `multipart/form-data`, `multipart/related` and `multipart/mixed`. This is a lower-level
//! extractor which supports reading [multipart fields](Field), in the order they are sent by the
//! client.
//!
//! Due to additional requirements for `multipart/form-data` requests, the higher level
//! [`MultipartForm`] extractor and derive macro only supports this media type.
//!
//! # Examples
//!
//! ```no_run
//! use actix_web::{post, App, HttpServer, Responder};
//!
//! use actix_multipart::form::{json::Json as MpJson, tempfile::TempFile, MultipartForm};
//! use serde::Deserialize;
//!
//! #[derive(Debug, Deserialize)]
//! struct Metadata {
//! name: String,
//! }
//!
//! #[derive(Debug, MultipartForm)]
//! struct UploadForm {
//! #[multipart(limit = "100MB")]
//! file: TempFile,
//! json: MpJson<Metadata>,
//! }
//!
//! #[post("/videos")]
//! pub async fn post_video(MultipartForm(form): MultipartForm<UploadForm>) -> impl Responder {
//! format!(
//! "Uploaded file {}, with size: {}",
//! form.json.name, form.file.size
//! )
//! }
//!
//! #[actix_web::main]
//! async fn main() -> std::io::Result<()> {
//! HttpServer::new(move || App::new().service(post_video))
//! .bind(("127.0.0.1", 8080))?
//! .run()
//! .await
//! }
//! ```
//!
//! cURL request:
//!
//! ```sh
//! curl -v --request POST \
//! --url http://localhost:8080/videos \
//! -F 'json={"name": "Cargo.lock"};type=application/json' \
//! -F file=@./Cargo.lock
//! ```
//!
//! [`MultipartForm`]: struct@form::MultipartForm
#![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
#![allow(clippy::borrow_interior_mutable_const)]
#![doc(html_logo_url = "https://actix.rs/img/logo.png")] #![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")] #![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))] #![cfg_attr(docsrs, feature(doc_auto_cfg))]
@ -13,11 +66,15 @@ extern crate self as actix_multipart;
mod error; mod error;
mod extractor; mod extractor;
mod server; pub(crate) mod field;
pub mod form; pub mod form;
mod multipart;
pub(crate) mod payload;
pub(crate) mod safety;
pub mod test;
pub use self::{ pub use self::{
error::MultipartError, error::Error as MultipartError,
server::{Field, Multipart}, field::{Field, LimitExceeded},
multipart::Multipart,
}; };

View file

@ -0,0 +1,883 @@
//! Multipart request payload support.
use std::{
cell::RefCell,
pin::Pin,
rc::Rc,
task::{Context, Poll},
};
use actix_web::{
dev,
error::{ParseError, PayloadError},
http::header::{self, ContentDisposition, HeaderMap, HeaderName, HeaderValue},
web::Bytes,
HttpRequest,
};
use futures_core::stream::Stream;
use mime::Mime;
use crate::{
error::Error,
field::InnerField,
payload::{PayloadBuffer, PayloadRef},
safety::Safety,
Field,
};
const MAX_HEADERS: usize = 32;
/// The server-side implementation of `multipart/*` requests.
///
/// This parses the incoming request payload into [`Field`] instances via its `Stream`
/// implementation, yielding fields in the order they appear in the body. Nested multipart
/// streams are not supported and produce an error.
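///
/// # Examples
///
/// A minimal sketch (not part of the original source) of streaming fields in a handler; the
/// handler name and response are assumptions for illustration only.
///
/// ```no_run
/// use actix_multipart::Multipart;
/// use actix_web::{Error, HttpResponse};
/// use futures_util::TryStreamExt as _;
///
/// async fn index(mut payload: Multipart) -> Result<HttpResponse, Error> {
///     // fields are yielded in the order the client sent them
///     while let Some(mut field) = payload.try_next().await? {
///         // each field is itself a stream of `Bytes` chunks
///         while let Some(chunk) = field.try_next().await? {
///             println!("field {:?}: read {} bytes", field.name(), chunk.len());
///         }
///     }
///
///     Ok(HttpResponse::Ok().finish())
/// }
/// ```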
pub struct Multipart {
flow: Flow,
safety: Safety,
}
enum Flow {
InFlight(Inner),
/// Error container is Some until an error is returned out of the flow.
Error(Option<Error>),
}
impl Multipart {
/// Creates a multipart reader from a header map and a body stream.
pub fn new<S>(headers: &HeaderMap, stream: S) -> Self
where
S: Stream<Item = Result<Bytes, PayloadError>> + 'static,
{
match Self::find_ct_and_boundary(headers) {
Ok((ct, boundary)) => Self::from_ct_and_boundary(ct, boundary, stream),
Err(err) => Self::from_error(err),
}
}
/// Creates a multipart reader from an HTTP request and its payload.
pub(crate) fn from_req(req: &HttpRequest, payload: &mut dev::Payload) -> Self {
match Self::find_ct_and_boundary(req.headers()) {
Ok((ct, boundary)) => Self::from_ct_and_boundary(ct, boundary, payload.take()),
Err(err) => Self::from_error(err),
}
}
/// Extract Content-Type and boundary info from headers.
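///
/// A typical header value looks like (illustrative):
/// `multipart/form-data; boundary=----WebKitFormBoundaryAbc123`.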
pub(crate) fn find_ct_and_boundary(headers: &HeaderMap) -> Result<(Mime, String), Error> {
let content_type = headers
.get(&header::CONTENT_TYPE)
.ok_or(Error::ContentTypeMissing)?
.to_str()
.ok()
.and_then(|content_type| content_type.parse::<Mime>().ok())
.ok_or(Error::ContentTypeParse)?;
if content_type.type_() != mime::MULTIPART {
return Err(Error::ContentTypeIncompatible);
}
let boundary = content_type
.get_param(mime::BOUNDARY)
.ok_or(Error::BoundaryMissing)?
.as_str()
.to_owned();
Ok((content_type, boundary))
}
/// Constructs a new multipart reader from given Content-Type, boundary, and stream.
pub(crate) fn from_ct_and_boundary<S>(ct: Mime, boundary: String, stream: S) -> Multipart
where
S: Stream<Item = Result<Bytes, PayloadError>> + 'static,
{
Multipart {
safety: Safety::new(),
flow: Flow::InFlight(Inner {
payload: PayloadRef::new(PayloadBuffer::new(stream)),
content_type: ct,
boundary,
state: State::FirstBoundary,
item: Item::None,
}),
}
}
/// Constructs a new multipart reader from given `MultipartError`.
pub(crate) fn from_error(err: Error) -> Multipart {
Multipart {
flow: Flow::Error(Some(err)),
safety: Safety::new(),
}
}
/// Returns the request's parsed Content-Type, or the error stored when construction failed.
pub(crate) fn content_type_or_bail(&mut self) -> Result<mime::Mime, Error> {
match self.flow {
Flow::InFlight(ref inner) => Ok(inner.content_type.clone()),
Flow::Error(ref mut err) => Err(err
.take()
.expect("error should not be taken after it was returned")),
}
}
}
impl Stream for Multipart {
type Item = Result<Field, Error>;
fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
let this = self.get_mut();
match this.flow {
Flow::InFlight(ref mut inner) => {
if let Some(mut buffer) = inner.payload.get_mut(&this.safety) {
// check safety and poll read payload to buffer.
buffer.poll_stream(cx)?;
} else if !this.safety.is_clean() {
// safety violation
return Poll::Ready(Some(Err(Error::NotConsumed)));
} else {
return Poll::Pending;
}
inner.poll(&this.safety, cx)
}
Flow::Error(ref mut err) => Poll::Ready(Some(Err(err
.take()
.expect("Multipart polled after finish")))),
}
}
}
#[derive(PartialEq, Debug)]
enum State {
/// Skip data until first boundary.
FirstBoundary,
/// Reading boundary.
Boundary,
/// Reading Headers.
Headers,
/// Stream EOF.
Eof,
}
enum Item {
None,
Field(Rc<RefCell<InnerField>>),
}
struct Inner {
/// Request's payload stream & buffer.
payload: PayloadRef,
/// Request's Content-Type.
///
/// Guaranteed to have "multipart" top-level media type, i.e., `multipart/*`.
content_type: Mime,
/// Field boundary.
boundary: String,
state: State,
item: Item,
}
impl Inner {
fn read_field_headers(payload: &mut PayloadBuffer) -> Result<Option<HeaderMap>, Error> {
match payload.read_until(b"\r\n\r\n")? {
None => {
if payload.eof {
Err(Error::Incomplete)
} else {
Ok(None)
}
}
Some(bytes) => {
let mut hdrs = [httparse::EMPTY_HEADER; MAX_HEADERS];
match httparse::parse_headers(&bytes, &mut hdrs).map_err(ParseError::from)? {
httparse::Status::Complete((_, hdrs)) => {
// convert headers
let mut headers = HeaderMap::with_capacity(hdrs.len());
for h in hdrs {
let name =
HeaderName::try_from(h.name).map_err(|_| ParseError::Header)?;
let value =
HeaderValue::try_from(h.value).map_err(|_| ParseError::Header)?;
headers.append(name, value);
}
Ok(Some(headers))
}
httparse::Status::Partial => Err(ParseError::Header.into()),
}
}
}
}
/// Reads a field boundary from the payload buffer (and discards it).
///
/// Reads "in-between" and "final" boundaries. E.g. for boundary = "foo":
///
/// ```plain
/// --foo <-- in-between fields
/// --foo-- <-- end of request body, should be followed by EOF
/// ```
///
/// Returns:
///
/// - `Ok(Some(true))` - final field boundary read (EOF)
/// - `Ok(Some(false))` - field boundary read
/// - `Ok(None)` - boundary not found, more data needs reading
/// - `Err(BoundaryMissing)` - multipart boundary is missing
fn read_boundary(payload: &mut PayloadBuffer, boundary: &str) -> Result<Option<bool>, Error> {
// TODO: need to read epilogue
let chunk = match payload.readline_or_eof()? {
// TODO: this might be okay as a let Some() else return Ok(None)
None => return Ok(payload.eof.then_some(true)),
Some(chunk) => chunk,
};
const BOUNDARY_MARKER: &[u8] = b"--";
const LINE_BREAK: &[u8] = b"\r\n";
let boundary_len = boundary.len();
if chunk.len() < boundary_len + 2 + 2
|| !chunk.starts_with(BOUNDARY_MARKER)
|| &chunk[2..boundary_len + 2] != boundary.as_bytes()
{
return Err(Error::BoundaryMissing);
}
// chunk facts:
// - long enough to contain boundary + 2 markers or 1 marker and line-break
// - starts with boundary marker
// - chunk contains correct boundary
if &chunk[boundary_len + 2..] == LINE_BREAK {
// boundary is followed by line-break, indicating more fields to come
return Ok(Some(false));
}
// boundary is followed by marker
if &chunk[boundary_len + 2..boundary_len + 4] == BOUNDARY_MARKER
&& (
// chunk is exactly boundary len + 2 markers
chunk.len() == boundary_len + 2 + 2
// final boundary is allowed to end with a line-break
|| &chunk[boundary_len + 4..] == LINE_BREAK
)
{
return Ok(Some(true));
}
Err(Error::BoundaryMissing)
}
fn skip_until_boundary(
payload: &mut PayloadBuffer,
boundary: &str,
) -> Result<Option<bool>, Error> {
let mut eof = false;
loop {
match payload.readline()? {
Some(chunk) => {
if chunk.is_empty() {
return Err(Error::BoundaryMissing);
}
if chunk.len() < boundary.len() {
continue;
}
if &chunk[..2] == b"--" && &chunk[2..chunk.len() - 2] == boundary.as_bytes() {
break;
} else {
if chunk.len() < boundary.len() + 2 {
continue;
}
let b: &[u8] = boundary.as_ref();
if &chunk[..boundary.len()] == b
&& &chunk[boundary.len()..boundary.len() + 2] == b"--"
{
eof = true;
break;
}
}
}
None => {
return if payload.eof {
Err(Error::Incomplete)
} else {
Ok(None)
};
}
}
}
Ok(Some(eof))
}
fn poll(&mut self, safety: &Safety, cx: &Context<'_>) -> Poll<Option<Result<Field, Error>>> {
if self.state == State::Eof {
Poll::Ready(None)
} else {
// release field
loop {
// The current field's stream has to be fully consumed
// before switching to the next field
if safety.current() {
let stop = match self.item {
Item::Field(ref mut field) => match field.borrow_mut().poll(safety) {
Poll::Pending => return Poll::Pending,
Poll::Ready(Some(Ok(_))) => continue,
Poll::Ready(Some(Err(err))) => return Poll::Ready(Some(Err(err))),
Poll::Ready(None) => true,
},
Item::None => false,
};
if stop {
self.item = Item::None;
}
if let Item::None = self.item {
break;
}
}
}
let field_headers = if let Some(mut payload) = self.payload.get_mut(safety) {
match self.state {
// read until first boundary
State::FirstBoundary => {
match Inner::skip_until_boundary(&mut payload, &self.boundary)? {
None => return Poll::Pending,
Some(eof) => {
if eof {
self.state = State::Eof;
return Poll::Ready(None);
} else {
self.state = State::Headers;
}
}
}
}
// read boundary
State::Boundary => match Inner::read_boundary(&mut payload, &self.boundary)? {
None => return Poll::Pending,
Some(eof) => {
if eof {
self.state = State::Eof;
return Poll::Ready(None);
} else {
self.state = State::Headers;
}
}
},
_ => {}
}
// read field headers for next field
if self.state == State::Headers {
if let Some(headers) = Inner::read_field_headers(&mut payload)? {
self.state = State::Boundary;
headers
} else {
return Poll::Pending;
}
} else {
unreachable!()
}
} else {
log::debug!("NotReady: field is in flight");
return Poll::Pending;
};
let field_content_disposition = field_headers
.get(&header::CONTENT_DISPOSITION)
.and_then(|cd| ContentDisposition::from_raw(cd).ok())
.filter(|content_disposition| {
matches!(
content_disposition.disposition,
header::DispositionType::FormData,
)
});
let form_field_name = if self.content_type.subtype() == mime::FORM_DATA {
// According to RFC 7578 §4.2, which relates to "multipart/form-data" requests
// specifically, fields must have a Content-Disposition header, its disposition
// type must be set as "form-data", and it must have a name parameter.
let Some(cd) = &field_content_disposition else {
return Poll::Ready(Some(Err(Error::ContentDispositionMissing)));
};
let Some(field_name) = cd.get_name() else {
return Poll::Ready(Some(Err(Error::ContentDispositionNameMissing)));
};
Some(field_name.to_owned())
} else {
None
};
// TODO: check out other multipart/* RFCs for specific requirements
let field_content_type: Option<Mime> = field_headers
.get(&header::CONTENT_TYPE)
.and_then(|ct| ct.to_str().ok())
.and_then(|ct| ct.parse().ok());
self.state = State::Boundary;
// nested multipart stream is not supported
if let Some(mime) = &field_content_type {
if mime.type_() == mime::MULTIPART {
return Poll::Ready(Some(Err(Error::Nested)));
}
}
let field_inner =
InnerField::new_in_rc(self.payload.clone(), self.boundary.clone(), &field_headers)?;
self.item = Item::Field(Rc::clone(&field_inner));
Poll::Ready(Some(Ok(Field::new(
field_content_type,
field_content_disposition,
form_field_name,
field_headers,
safety.clone(cx),
field_inner,
))))
}
}
}
impl Drop for Inner {
fn drop(&mut self) {
// `Item::Field` has to be dropped first because of `Safety`.
self.item = Item::None;
}
}
#[cfg(test)]
mod tests {
use std::time::Duration;
use actix_http::h1;
use actix_web::{
http::header::{DispositionParam, DispositionType},
rt,
test::TestRequest,
web::{BufMut as _, BytesMut},
FromRequest,
};
use assert_matches::assert_matches;
use futures_test::stream::StreamTestExt as _;
use futures_util::{stream, StreamExt as _};
use tokio::sync::mpsc;
use tokio_stream::wrappers::UnboundedReceiverStream;
use super::*;
const BOUNDARY: &str = "abbc761f78ff4d7cb7573b5a23f96ef0";
#[actix_rt::test]
async fn test_boundary() {
let headers = HeaderMap::new();
match Multipart::find_ct_and_boundary(&headers) {
Err(Error::ContentTypeMissing) => {}
_ => unreachable!("should not happen"),
}
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static("test"),
);
match Multipart::find_ct_and_boundary(&headers) {
Err(Error::ContentTypeParse) => {}
_ => unreachable!("should not happen"),
}
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static("multipart/mixed"),
);
match Multipart::find_ct_and_boundary(&headers) {
Err(Error::BoundaryMissing) => {}
_ => unreachable!("should not happen"),
}
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static(
"multipart/mixed; boundary=\"5c02368e880e436dab70ed54e1c58209\"",
),
);
assert_eq!(
Multipart::find_ct_and_boundary(&headers).unwrap().1,
"5c02368e880e436dab70ed54e1c58209",
);
}
fn create_stream() -> (
mpsc::UnboundedSender<Result<Bytes, PayloadError>>,
impl Stream<Item = Result<Bytes, PayloadError>>,
) {
let (tx, rx) = mpsc::unbounded_channel();
(
tx,
UnboundedReceiverStream::new(rx).map(|res| res.map_err(|_| panic!())),
)
}
fn create_simple_request_with_header() -> (Bytes, HeaderMap) {
let (body, headers) = crate::test::create_form_data_payload_and_headers_with_boundary(
BOUNDARY,
"file",
Some("fn.txt".to_owned()),
Some(mime::TEXT_PLAIN_UTF_8),
Bytes::from_static(b"data"),
);
let mut buf = BytesMut::with_capacity(body.len() + 14);
// add junk before form to test pre-boundary data rejection
buf.put("testasdadsad\r\n".as_bytes());
buf.put(body);
(buf.freeze(), headers)
}
// TODO: use test utility when multi-file support is introduced
fn create_double_request_with_header() -> (Bytes, HeaderMap) {
let bytes = Bytes::from(
"testasdadsad\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
Content-Disposition: form-data; name=\"file\"; filename=\"fn.txt\"\r\n\
Content-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\n\
test\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
Content-Disposition: form-data; name=\"file\"; filename=\"fn.txt\"\r\n\
Content-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\n\
data\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0--\r\n",
);
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static(
"multipart/mixed; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
),
);
(bytes, headers)
}
#[actix_rt::test]
async fn test_multipart_no_end_crlf() {
let (sender, payload) = create_stream();
let (mut bytes, headers) = create_double_request_with_header();
let bytes_stripped = bytes.split_to(bytes.len()); // strip crlf
sender.send(Ok(bytes_stripped)).unwrap();
drop(sender); // eof
let mut multipart = Multipart::new(&headers, payload);
match multipart.next().await.unwrap() {
Ok(_) => {}
_ => unreachable!(),
}
match multipart.next().await.unwrap() {
Ok(_) => {}
_ => unreachable!(),
}
match multipart.next().await {
None => {}
_ => unreachable!(),
}
}
#[actix_rt::test]
async fn test_multipart() {
let (sender, payload) = create_stream();
let (bytes, headers) = create_double_request_with_header();
sender.send(Ok(bytes)).unwrap();
let mut multipart = Multipart::new(&headers, payload);
match multipart.next().await {
Some(Ok(mut field)) => {
let cd = field.content_disposition().unwrap();
assert_eq!(cd.disposition, DispositionType::FormData);
assert_eq!(cd.parameters[0], DispositionParam::Name("file".into()));
assert_eq!(field.content_type().unwrap().type_(), mime::TEXT);
assert_eq!(field.content_type().unwrap().subtype(), mime::PLAIN);
match field.next().await.unwrap() {
Ok(chunk) => assert_eq!(chunk, "test"),
_ => unreachable!(),
}
match field.next().await {
None => {}
_ => unreachable!(),
}
}
_ => unreachable!(),
}
match multipart.next().await.unwrap() {
Ok(mut field) => {
assert_eq!(field.content_type().unwrap().type_(), mime::TEXT);
assert_eq!(field.content_type().unwrap().subtype(), mime::PLAIN);
match field.next().await {
Some(Ok(chunk)) => assert_eq!(chunk, "data"),
_ => unreachable!(),
}
match field.next().await {
None => {}
_ => unreachable!(),
}
}
_ => unreachable!(),
}
match multipart.next().await {
None => {}
_ => unreachable!(),
}
}
// Loops, collecting all bytes until end-of-field
async fn get_whole_field(field: &mut Field) -> BytesMut {
let mut b = BytesMut::new();
loop {
match field.next().await {
Some(Ok(chunk)) => b.extend_from_slice(&chunk),
None => return b,
_ => unreachable!(),
}
}
}
#[actix_rt::test]
async fn test_stream() {
let (bytes, headers) = create_double_request_with_header();
let payload = stream::iter(bytes)
.map(|byte| Ok(Bytes::copy_from_slice(&[byte])))
.interleave_pending();
let mut multipart = Multipart::new(&headers, payload);
match multipart.next().await.unwrap() {
Ok(mut field) => {
let cd = field.content_disposition().unwrap();
assert_eq!(cd.disposition, DispositionType::FormData);
assert_eq!(cd.parameters[0], DispositionParam::Name("file".into()));
assert_eq!(field.content_type().unwrap().type_(), mime::TEXT);
assert_eq!(field.content_type().unwrap().subtype(), mime::PLAIN);
assert_eq!(get_whole_field(&mut field).await, "test");
}
_ => unreachable!(),
}
match multipart.next().await {
Some(Ok(mut field)) => {
assert_eq!(field.content_type().unwrap().type_(), mime::TEXT);
assert_eq!(field.content_type().unwrap().subtype(), mime::PLAIN);
assert_eq!(get_whole_field(&mut field).await, "data");
}
_ => unreachable!(),
}
match multipart.next().await {
None => {}
_ => unreachable!(),
}
}
#[actix_rt::test]
async fn test_multipart_from_error() {
let err = Error::ContentTypeMissing;
let mut multipart = Multipart::from_error(err);
assert!(multipart.next().await.unwrap().is_err())
}
#[actix_rt::test]
async fn test_multipart_from_boundary() {
let (_, payload) = create_stream();
let (_, headers) = create_simple_request_with_header();
let (ct, boundary) = Multipart::find_ct_and_boundary(&headers).unwrap();
let _ = Multipart::from_ct_and_boundary(ct, boundary, payload);
}
#[actix_rt::test]
async fn test_multipart_payload_consumption() {
// with sample payload and HttpRequest with no headers
let (_, inner_payload) = h1::Payload::create(false);
let mut payload = actix_web::dev::Payload::from(inner_payload);
let req = TestRequest::default().to_http_request();
// multipart should generate an error
let mut mp = Multipart::from_request(&req, &mut payload).await.unwrap();
assert!(mp.next().await.unwrap().is_err());
// and should not consume the payload
match payload {
actix_web::dev::Payload::H1 { .. } => {} //expected
_ => unreachable!(),
}
}
#[actix_rt::test]
async fn no_content_disposition_form_data() {
let bytes = Bytes::from(
"testasdadsad\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
Content-Type: text/plain; charset=utf-8\r\n\
Content-Length: 4\r\n\
\r\n\
test\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n",
);
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static(
"multipart/form-data; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
),
);
let payload = stream::iter(bytes)
.map(|byte| Ok(Bytes::copy_from_slice(&[byte])))
.interleave_pending();
let mut multipart = Multipart::new(&headers, payload);
let res = multipart.next().await.unwrap();
assert_matches!(
res.expect_err(
"according to RFC 7578, form-data fields require a content-disposition header"
),
Error::ContentDispositionMissing
);
}
#[actix_rt::test]
async fn no_content_disposition_non_form_data() {
let bytes = Bytes::from(
"testasdadsad\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
Content-Type: text/plain; charset=utf-8\r\n\
Content-Length: 4\r\n\
\r\n\
test\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n",
);
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static(
"multipart/mixed; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
),
);
let payload = stream::iter(bytes)
.map(|byte| Ok(Bytes::copy_from_slice(&[byte])))
.interleave_pending();
let mut multipart = Multipart::new(&headers, payload);
let res = multipart.next().await.unwrap();
res.unwrap();
}
#[actix_rt::test]
async fn no_name_in_form_data_content_disposition() {
let bytes = Bytes::from(
"testasdadsad\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
Content-Disposition: form-data; filename=\"fn.txt\"\r\n\
Content-Type: text/plain; charset=utf-8\r\n\
Content-Length: 4\r\n\
\r\n\
test\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n",
);
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static(
"multipart/form-data; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
),
);
let payload = stream::iter(bytes)
.map(|byte| Ok(Bytes::copy_from_slice(&[byte])))
.interleave_pending();
let mut multipart = Multipart::new(&headers, payload);
let res = multipart.next().await.unwrap();
assert_matches!(
res.expect_err("according to RFC 7578, form-data fields require a name attribute"),
Error::ContentDispositionNameMissing
);
}
#[actix_rt::test]
async fn test_drop_multipart_dont_hang() {
let (sender, payload) = create_stream();
let (bytes, headers) = create_simple_request_with_header();
sender.send(Ok(bytes)).unwrap();
drop(sender); // eof
let mut multipart = Multipart::new(&headers, payload);
let mut field = multipart.next().await.unwrap().unwrap();
drop(multipart);
// should fail immediately
match field.next().await {
Some(Err(Error::NotConsumed)) => {}
_ => panic!(),
};
}
#[actix_rt::test]
async fn test_drop_field_awaken_multipart() {
let (sender, payload) = create_stream();
let (bytes, headers) = create_double_request_with_header();
sender.send(Ok(bytes)).unwrap();
drop(sender); // eof
let mut multipart = Multipart::new(&headers, payload);
let mut field = multipart.next().await.unwrap().unwrap();
let task = rt::spawn(async move {
rt::time::sleep(Duration::from_millis(500)).await;
assert_eq!(field.next().await.unwrap().unwrap(), "test");
drop(field);
});
// dropping field should awaken current task
let _ = multipart.next().await.unwrap().unwrap();
task.await.unwrap();
}
}

View file

@ -0,0 +1,255 @@
use std::{
cell::{RefCell, RefMut},
cmp, mem,
pin::Pin,
rc::Rc,
task::{Context, Poll},
};
use actix_web::{
error::PayloadError,
web::{Bytes, BytesMut},
};
use futures_core::stream::{LocalBoxStream, Stream};
use crate::{error::Error, safety::Safety};
pub(crate) struct PayloadRef {
payload: Rc<RefCell<PayloadBuffer>>,
}
impl PayloadRef {
pub(crate) fn new(payload: PayloadBuffer) -> PayloadRef {
PayloadRef {
payload: Rc::new(RefCell::new(payload)),
}
}
pub(crate) fn get_mut(&self, safety: &Safety) -> Option<RefMut<'_, PayloadBuffer>> {
if safety.current() {
Some(self.payload.borrow_mut())
} else {
None
}
}
}
impl Clone for PayloadRef {
fn clone(&self) -> PayloadRef {
PayloadRef {
payload: Rc::clone(&self.payload),
}
}
}
/// Payload buffer.
pub(crate) struct PayloadBuffer {
pub(crate) stream: LocalBoxStream<'static, Result<Bytes, PayloadError>>,
pub(crate) buf: BytesMut,
/// EOF flag. If true, no more payload reads will be attempted.
pub(crate) eof: bool,
}
impl PayloadBuffer {
/// Constructs new payload buffer.
pub(crate) fn new<S>(stream: S) -> Self
where
S: Stream<Item = Result<Bytes, PayloadError>> + 'static,
{
PayloadBuffer {
stream: Box::pin(stream),
buf: BytesMut::with_capacity(1_024), // pre-allocate 1KiB
eof: false,
}
}
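/// Polls the wrapped stream, appending any ready chunks to the internal buffer.
///
/// Sets the EOF flag when the stream finishes; stream errors are returned to the caller.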
pub(crate) fn poll_stream(&mut self, cx: &mut Context<'_>) -> Result<(), PayloadError> {
loop {
match Pin::new(&mut self.stream).poll_next(cx) {
Poll::Ready(Some(Ok(data))) => {
self.buf.extend_from_slice(&data);
// try to read more data
continue;
}
Poll::Ready(Some(Err(err))) => return Err(err),
Poll::Ready(None) => {
self.eof = true;
return Ok(());
}
Poll::Pending => return Ok(()),
}
}
}
/// Reads exact number of bytes.
#[cfg(test)]
pub(crate) fn read_exact(&mut self, size: usize) -> Option<Bytes> {
if size <= self.buf.len() {
Some(self.buf.split_to(size).freeze())
} else {
None
}
}
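/// Reads up to `size` bytes from the buffer.
///
/// Returns `Ok(None)` if the buffer is empty but the stream has not yet ended, and
/// `Err(Incomplete)` if the buffer is empty at EOF.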
pub(crate) fn read_max(&mut self, size: u64) -> Result<Option<Bytes>, Error> {
if !self.buf.is_empty() {
let size = cmp::min(self.buf.len() as u64, size) as usize;
Ok(Some(self.buf.split_to(size).freeze()))
} else if self.eof {
Err(Error::Incomplete)
} else {
Ok(None)
}
}
/// Reads until specified ending.
///
/// Returns:
///
/// - `Ok(Some(chunk))` - `needle` is found, with chunk ending after needle
/// - `Err(Incomplete)` - `needle` is not found and we're at EOF
/// - `Ok(None)` - `needle` is not found otherwise
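///
/// An illustrative sketch of the buffer semantics (hypothetical contents, not from the original
/// source):
///
/// ```ignore
/// // buffer currently holds b"line1\r\nline2"
/// assert_eq!(buf.read_until(b"\r\n").unwrap(), Some(Bytes::from("line1\r\n")));
/// // needle not present in the remaining b"line2" and stream is not at EOF yet
/// assert_eq!(buf.read_until(b"\r\n").unwrap(), None);
/// ```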
pub(crate) fn read_until(&mut self, needle: &[u8]) -> Result<Option<Bytes>, Error> {
match memchr::memmem::find(&self.buf, needle) {
// buffer exhausted and EOF without finding needle
None if self.eof => Err(Error::Incomplete),
// needle not yet found
None => Ok(None),
// needle found, split chunk out of buf
Some(idx) => Ok(Some(self.buf.split_to(idx + needle.len()).freeze())),
}
}
/// Reads bytes until new line delimiter (`\n`, `0x0A`).
///
/// Returns:
///
/// - `Ok(Some(chunk))` - delimiter is found, with the chunk ending after it
/// - `Err(Incomplete)` - delimiter is not found and we're at EOF
/// - `Ok(None)` - delimiter is not found otherwise
#[inline]
pub(crate) fn readline(&mut self) -> Result<Option<Bytes>, Error> {
self.read_until(b"\n")
}
/// Reads bytes until new line delimiter or until EOF.
#[inline]
pub(crate) fn readline_or_eof(&mut self) -> Result<Option<Bytes>, Error> {
match self.readline() {
Err(Error::Incomplete) if self.eof => Ok(Some(self.buf.split().freeze())),
line => line,
}
}
/// Puts unprocessed data back to the buffer.
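///
/// Illustrative example: if the buffer currently holds `b"world"` and `b"hello "` is passed to
/// this method, subsequent reads see `b"hello world"`.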
pub(crate) fn unprocessed(&mut self, data: Bytes) {
// TODO: use BytesMut::from when it's released, see https://github.com/tokio-rs/bytes/pull/710
let buf = BytesMut::from(&data[..]);
let buf = mem::replace(&mut self.buf, buf);
self.buf.extend_from_slice(&buf);
}
}
#[cfg(test)]
mod tests {
use actix_http::h1;
use futures_util::future::lazy;
use super::*;
#[actix_rt::test]
async fn basic() {
let (_, payload) = h1::Payload::create(false);
let mut payload = PayloadBuffer::new(payload);
assert_eq!(payload.buf.len(), 0);
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
assert_eq!(None, payload.read_max(1).unwrap());
}
#[actix_rt::test]
async fn eof() {
let (mut sender, payload) = h1::Payload::create(false);
let mut payload = PayloadBuffer::new(payload);
assert_eq!(None, payload.read_max(4).unwrap());
sender.feed_data(Bytes::from("data"));
sender.feed_eof();
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
assert_eq!(Some(Bytes::from("data")), payload.read_max(4).unwrap());
assert_eq!(payload.buf.len(), 0);
assert!(payload.read_max(1).is_err());
assert!(payload.eof);
}
#[actix_rt::test]
async fn err() {
let (mut sender, payload) = h1::Payload::create(false);
let mut payload = PayloadBuffer::new(payload);
assert_eq!(None, payload.read_max(1).unwrap());
sender.set_error(PayloadError::Incomplete(None));
lazy(|cx| payload.poll_stream(cx)).await.err().unwrap();
}
#[actix_rt::test]
async fn read_max() {
let (mut sender, payload) = h1::Payload::create(false);
let mut payload = PayloadBuffer::new(payload);
sender.feed_data(Bytes::from("line1"));
sender.feed_data(Bytes::from("line2"));
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
assert_eq!(payload.buf.len(), 10);
assert_eq!(Some(Bytes::from("line1")), payload.read_max(5).unwrap());
assert_eq!(payload.buf.len(), 5);
assert_eq!(Some(Bytes::from("line2")), payload.read_max(5).unwrap());
assert_eq!(payload.buf.len(), 0);
}
#[actix_rt::test]
async fn read_exactly() {
let (mut sender, payload) = h1::Payload::create(false);
let mut payload = PayloadBuffer::new(payload);
assert_eq!(None, payload.read_exact(2));
sender.feed_data(Bytes::from("line1"));
sender.feed_data(Bytes::from("line2"));
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
assert_eq!(Some(Bytes::from_static(b"li")), payload.read_exact(2));
assert_eq!(payload.buf.len(), 8);
assert_eq!(Some(Bytes::from_static(b"ne1l")), payload.read_exact(4));
assert_eq!(payload.buf.len(), 4);
}
#[actix_rt::test]
async fn read_until() {
let (mut sender, payload) = h1::Payload::create(false);
let mut payload = PayloadBuffer::new(payload);
assert_eq!(None, payload.read_until(b"ne").unwrap());
sender.feed_data(Bytes::from("line1"));
sender.feed_data(Bytes::from("line2"));
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
assert_eq!(
Some(Bytes::from("line")),
payload.read_until(b"ne").unwrap()
);
assert_eq!(payload.buf.len(), 6);
assert_eq!(
Some(Bytes::from("1line2")),
payload.read_until(b"2").unwrap()
);
assert_eq!(payload.buf.len(), 0);
}
}
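For readers unfamiliar with the buffer-splitting semantics documented above, here is a minimal standalone sketch of the `read_until` contract. It is illustrative only (the `MiniBuffer` type is not part of actix-multipart) and assumes the `bytes` and `memchr` crates are available as dependencies.

```rust
use bytes::{Bytes, BytesMut};

/// Illustrative stand-in for the internal payload buffer; not the crate's API.
struct MiniBuffer {
    buf: BytesMut,
    eof: bool,
}

impl MiniBuffer {
    /// Splits off everything up to and including `needle`, mirroring `read_until` above.
    fn read_until(&mut self, needle: &[u8]) -> Result<Option<Bytes>, &'static str> {
        match memchr::memmem::find(&self.buf, needle) {
            // buffer exhausted and EOF without finding needle
            None if self.eof => Err("incomplete"),
            // needle not yet found; caller should poll for more data
            None => Ok(None),
            // needle found, split chunk (including the needle) out of the buffer
            Some(idx) => Ok(Some(self.buf.split_to(idx + needle.len()).freeze())),
        }
    }
}

fn main() {
    let mut buf = MiniBuffer {
        buf: BytesMut::from(&b"line1\nline2"[..]),
        eof: true,
    };

    // First call yields the chunk up to and including the newline.
    assert_eq!(
        buf.read_until(b"\n").unwrap().unwrap(),
        Bytes::from_static(b"line1\n")
    );

    // Delimiter not found and we're at EOF, so this reports an error.
    assert!(buf.read_until(b"\n").is_err());
}
```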

View file

@ -0,0 +1,60 @@
use std::{cell::Cell, marker::PhantomData, rc::Rc, task};
use local_waker::LocalWaker;
/// Counter that tracks the number of payload clones and gives access to the payload only to the
/// top-most owner.
///
/// - When dropped, the parent task is awakened. This supports the case where a `Field` is dropped
///   in a different task than its `Multipart`.
/// - Assumes that parent owners don't move to different tasks; only the top-most is allowed to.
/// - If dropped while not the top-most owner, the `clean` flag is set to false.
#[derive(Debug)]
pub(crate) struct Safety {
task: LocalWaker,
level: usize,
payload: Rc<PhantomData<bool>>,
clean: Rc<Cell<bool>>,
}
impl Safety {
pub(crate) fn new() -> Safety {
let payload = Rc::new(PhantomData);
Safety {
task: LocalWaker::new(),
level: Rc::strong_count(&payload),
clean: Rc::new(Cell::new(true)),
payload,
}
}
pub(crate) fn current(&self) -> bool {
Rc::strong_count(&self.payload) == self.level && self.clean.get()
}
pub(crate) fn is_clean(&self) -> bool {
self.clean.get()
}
pub(crate) fn clone(&self, cx: &task::Context<'_>) -> Safety {
let payload = Rc::clone(&self.payload);
let s = Safety {
task: LocalWaker::new(),
level: Rc::strong_count(&payload),
clean: self.clean.clone(),
payload,
};
s.task.register(cx.waker());
s
}
}
impl Drop for Safety {
fn drop(&mut self) {
if Rc::strong_count(&self.payload) != self.level {
// Multipart dropped leaving a Field
self.clean.set(false);
}
self.task.wake();
}
}
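The `Rc::strong_count` bookkeeping above can be hard to follow from the doc comment alone. The following self-contained sketch (illustrative `Tracker` type, not the crate's API) shows the same idea of detecting whether a handle is still the top-most owner by comparing the current strong count against the count recorded at creation.

```rust
use std::rc::Rc;

/// Illustrative tracker: records the strong count at creation and compares it later.
struct Tracker {
    marker: Rc<()>,
    level: usize,
}

impl Tracker {
    fn new() -> Self {
        let marker = Rc::new(());
        Self { level: Rc::strong_count(&marker), marker }
    }

    /// Clone as a "child": the strong count rises, so the child records a higher level.
    fn child(&self) -> Self {
        let marker = Rc::clone(&self.marker);
        Self { level: Rc::strong_count(&marker), marker }
    }

    /// Only the handle whose recorded level matches the current count is "current".
    fn current(&self) -> bool {
        Rc::strong_count(&self.marker) == self.level
    }
}

fn main() {
    let parent = Tracker::new();
    assert!(parent.current()); // count == 1 == parent.level

    let child = parent.child();
    assert!(child.current());   // count == 2 == child.level
    assert!(!parent.current()); // parent is no longer the top-most owner

    drop(child);
    assert!(parent.current()); // parent regains top-most status
}
```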

File diff suppressed because it is too large

actix-multipart/src/test.rs (new file, 220 lines)
View file

@ -0,0 +1,220 @@
//! Multipart testing utilities.
use actix_web::{
http::header::{self, HeaderMap},
web::{BufMut as _, Bytes, BytesMut},
};
use mime::Mime;
use rand::{
distributions::{Alphanumeric, DistString as _},
thread_rng,
};
const CRLF: &[u8] = b"\r\n";
const CRLF_CRLF: &[u8] = b"\r\n\r\n";
const HYPHENS: &[u8] = b"--";
const BOUNDARY_PREFIX: &str = "------------------------";
/// Constructs a `multipart/form-data` payload from bytes and metadata.
///
/// Returned header map can be extended or merged with existing headers.
///
/// Multipart boundary used is a random alphanumeric string.
///
/// # Examples
///
/// ```
/// use actix_multipart::test::create_form_data_payload_and_headers;
/// use actix_web::{test::TestRequest, web::Bytes};
/// use memchr::memmem::find;
///
/// let (body, headers) = create_form_data_payload_and_headers(
/// "foo",
/// Some("lorem.txt".to_owned()),
/// Some(mime::TEXT_PLAIN_UTF_8),
/// Bytes::from_static(b"Lorem ipsum."),
/// );
///
/// assert!(find(&body, b"foo").is_some());
/// assert!(find(&body, b"lorem.txt").is_some());
/// assert!(find(&body, b"text/plain; charset=utf-8").is_some());
/// assert!(find(&body, b"Lorem ipsum.").is_some());
///
/// let req = TestRequest::default();
///
/// // merge header map into existing test request and set multipart body
/// let req = headers
/// .into_iter()
/// .fold(req, |req, hdr| req.insert_header(hdr))
/// .set_payload(body)
/// .to_http_request();
///
/// assert!(
/// req.headers()
/// .get("content-type")
/// .unwrap()
/// .to_str()
/// .unwrap()
/// .starts_with("multipart/form-data; boundary=\"")
/// );
/// ```
pub fn create_form_data_payload_and_headers(
name: &str,
filename: Option<String>,
content_type: Option<Mime>,
file: Bytes,
) -> (Bytes, HeaderMap) {
let boundary = Alphanumeric.sample_string(&mut thread_rng(), 32);
create_form_data_payload_and_headers_with_boundary(
&boundary,
name,
filename,
content_type,
file,
)
}
/// Constructs a `multipart/form-data` payload from bytes and metadata with a fixed boundary.
///
/// See [`create_form_data_payload_and_headers`] for more details.
pub fn create_form_data_payload_and_headers_with_boundary(
boundary: &str,
name: &str,
filename: Option<String>,
content_type: Option<Mime>,
file: Bytes,
) -> (Bytes, HeaderMap) {
let mut buf = BytesMut::with_capacity(file.len() + 128);
let boundary_str = [BOUNDARY_PREFIX, boundary].concat();
let boundary = boundary_str.as_bytes();
buf.put(HYPHENS);
buf.put(boundary);
buf.put(CRLF);
buf.put(format!("Content-Disposition: form-data; name=\"{name}\"").as_bytes());
if let Some(filename) = filename {
buf.put(format!("; filename=\"{filename}\"").as_bytes());
}
buf.put(CRLF);
if let Some(ct) = content_type {
buf.put(format!("Content-Type: {ct}").as_bytes());
buf.put(CRLF);
}
buf.put(format!("Content-Length: {}", file.len()).as_bytes());
buf.put(CRLF_CRLF);
buf.put(file);
buf.put(CRLF);
buf.put(HYPHENS);
buf.put(boundary);
buf.put(HYPHENS);
buf.put(CRLF);
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
format!("multipart/form-data; boundary=\"{boundary_str}\"")
.parse()
.unwrap(),
);
(buf.freeze(), headers)
}
#[cfg(test)]
mod tests {
use std::convert::Infallible;
use futures_util::stream;
use super::*;
fn find_boundary(headers: &HeaderMap) -> String {
headers
.get("content-type")
.unwrap()
.to_str()
.unwrap()
.parse::<mime::Mime>()
.unwrap()
.get_param(mime::BOUNDARY)
.unwrap()
.as_str()
.to_owned()
}
#[test]
fn wire_format() {
let (pl, headers) = create_form_data_payload_and_headers_with_boundary(
"qWeRtYuIoP",
"foo",
None,
None,
Bytes::from_static(b"Lorem ipsum dolor\nsit ame."),
);
assert_eq!(
find_boundary(&headers),
"------------------------qWeRtYuIoP",
);
assert_eq!(
std::str::from_utf8(&pl).unwrap(),
"--------------------------qWeRtYuIoP\r\n\
Content-Disposition: form-data; name=\"foo\"\r\n\
Content-Length: 26\r\n\
\r\n\
Lorem ipsum dolor\n\
sit ame.\r\n\
--------------------------qWeRtYuIoP--\r\n",
);
let (pl, _headers) = create_form_data_payload_and_headers_with_boundary(
"qWeRtYuIoP",
"foo",
Some("Lorem.txt".to_owned()),
Some(mime::TEXT_PLAIN_UTF_8),
Bytes::from_static(b"Lorem ipsum dolor\nsit ame."),
);
assert_eq!(
std::str::from_utf8(&pl).unwrap(),
"--------------------------qWeRtYuIoP\r\n\
Content-Disposition: form-data; name=\"foo\"; filename=\"Lorem.txt\"\r\n\
Content-Type: text/plain; charset=utf-8\r\n\
Content-Length: 26\r\n\
\r\n\
Lorem ipsum dolor\n\
sit ame.\r\n\
--------------------------qWeRtYuIoP--\r\n",
);
}
/// Test using an external library to prevent the two-wrongs-make-a-right class of errors.
#[actix_web::test]
async fn ecosystem_compat() {
let (pl, headers) = create_form_data_payload_and_headers(
"foo",
None,
None,
Bytes::from_static(b"Lorem ipsum dolor\nsit ame."),
);
let boundary = find_boundary(&headers);
let pl = stream::once(async { Ok::<_, Infallible>(pl) });
let mut form = multer::Multipart::new(pl, boundary);
let field = form.next_field().await.unwrap().unwrap();
assert_eq!(field.name().unwrap(), "foo");
assert_eq!(field.file_name(), None);
assert_eq!(field.content_type(), None);
assert!(field.bytes().await.unwrap().starts_with(b"Lorem"));
}
}

View file

@ -2,6 +2,13 @@
## Unreleased ## Unreleased
## 0.5.3
- Add `unicode` crate feature (on-by-default) to switch between `regex` and `regex-lite` as a trade-off between full unicode support and binary size.
- Minimum supported Rust version (MSRV) is now 1.72.
## 0.5.2
- Minimum supported Rust version (MSRV) is now 1.68 due to transitive `time` dependency. - Minimum supported Rust version (MSRV) is now 1.68 due to transitive `time` dependency.
## 0.5.1 ## 0.5.1

View file

@ -1,6 +1,6 @@
[package] [package]
name = "actix-router" name = "actix-router"
version = "0.5.1" version = "0.5.3"
authors = [ authors = [
"Nikolay Kim <fafhrd91@gmail.com>", "Nikolay Kim <fafhrd91@gmail.com>",
"Ali MJ Al-Nasrawy <alimjalnasrawy@gmail.com>", "Ali MJ Al-Nasrawy <alimjalnasrawy@gmail.com>",
@ -12,17 +12,23 @@ repository = "https://github.com/actix/actix-web"
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"
edition = "2021" edition = "2021"
[lib] [package.metadata.cargo_check_external_types]
name = "actix_router" allowed_external_types = [
path = "src/lib.rs" "http::*",
"serde::*",
]
[features] [features]
default = ["http"] default = ["http", "unicode"]
http = ["dep:http"]
unicode = ["dep:regex"]
[dependencies] [dependencies]
bytestring = ">=0.1.5, <2" bytestring = ">=0.1.5, <2"
cfg-if = "1"
http = { version = "0.2.7", optional = true } http = { version = "0.2.7", optional = true }
regex = "1.5" regex = { version = "1.5", optional = true }
regex-lite = "0.1"
serde = "1" serde = "1"
tracing = { version = "0.1.30", default-features = false, features = ["log"] } tracing = { version = "0.1.30", default-features = false, features = ["log"] }
@ -32,9 +38,13 @@ http = "0.2.7"
serde = { version = "1", features = ["derive"] } serde = { version = "1", features = ["derive"] }
percent-encoding = "2.1" percent-encoding = "2.1"
[lints]
workspace = true
[[bench]] [[bench]]
name = "router" name = "router"
harness = false harness = false
required-features = ["unicode"]
[[bench]] [[bench]]
name = "quoter" name = "quoter"

View file

@ -1,14 +1,18 @@
# `actix-router` # `actix-router`
<!-- prettier-ignore-start -->
[![crates.io](https://img.shields.io/crates/v/actix-router?label=latest)](https://crates.io/crates/actix-router) [![crates.io](https://img.shields.io/crates/v/actix-router?label=latest)](https://crates.io/crates/actix-router)
[![Documentation](https://docs.rs/actix-router/badge.svg?version=0.5.1)](https://docs.rs/actix-router/0.5.1) [![Documentation](https://docs.rs/actix-router/badge.svg?version=0.5.3)](https://docs.rs/actix-router/0.5.3)
![Version](https://img.shields.io/badge/rustc-1.68+-ab6000.svg) ![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-router.svg) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-router.svg)
<br /> <br />
[![dependency status](https://deps.rs/crate/actix-router/0.5.1/status.svg)](https://deps.rs/crate/actix-router/0.5.1) [![dependency status](https://deps.rs/crate/actix-router/0.5.3/status.svg)](https://deps.rs/crate/actix-router/0.5.3)
[![Download](https://img.shields.io/crates/d/actix-router.svg)](https://crates.io/crates/actix-router) [![Download](https://img.shields.io/crates/d/actix-router.svg)](https://crates.io/crates/actix-router)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
<!-- prettier-ignore-end -->
<!-- cargo-rdme start --> <!-- cargo-rdme start -->
Resource path matching and router. Resource path matching and router.

View file

@ -500,10 +500,10 @@ impl<'de> de::VariantAccess<'de> for UnitVariant {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use serde::{de, Deserialize}; use serde::Deserialize;
use super::*; use super::*;
use crate::{path::Path, router::Router, ResourceDef}; use crate::{router::Router, ResourceDef};
#[derive(Deserialize)] #[derive(Deserialize)]
struct MyStruct { struct MyStruct {
@ -511,11 +511,6 @@ mod tests {
value: String, value: String,
} }
#[derive(Deserialize)]
struct Id {
_id: String,
}
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
struct Test1(String, u32); struct Test1(String, u32);

View file

@ -1,7 +1,5 @@
//! Resource path matching and router. //! Resource path matching and router.
#![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
#![doc(html_logo_url = "https://actix.rs/img/logo.png")] #![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")] #![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))] #![cfg_attr(docsrs, feature(doc_auto_cfg))]
@ -10,6 +8,7 @@ mod de;
mod path; mod path;
mod pattern; mod pattern;
mod quoter; mod quoter;
mod regex_set;
mod resource; mod resource;
mod resource_path; mod resource_path;
mod router; mod router;

View file

@ -3,7 +3,7 @@ use std::{
ops::{DerefMut, Index}, ops::{DerefMut, Index},
}; };
use serde::de; use serde::{de, Deserialize};
use crate::{de::PathDeserializer, Resource, ResourcePath}; use crate::{de::PathDeserializer, Resource, ResourcePath};
@ -24,8 +24,13 @@ impl Default for PathItem {
/// If resource path contains variable patterns, `Path` stores them. /// If resource path contains variable patterns, `Path` stores them.
#[derive(Debug, Clone, Default)] #[derive(Debug, Clone, Default)]
pub struct Path<T> { pub struct Path<T> {
/// Full path representation.
path: T, path: T,
/// Number of characters in `path` that have been processed into `segments`.
pub(crate) skip: u16, pub(crate) skip: u16,
/// List of processed dynamic segments; name->value pairs.
pub(crate) segments: Vec<(Cow<'static, str>, PathItem)>, pub(crate) segments: Vec<(Cow<'static, str>, PathItem)>,
} }
@ -83,8 +88,8 @@ impl<T: ResourcePath> Path<T> {
/// Set new path. /// Set new path.
#[inline] #[inline]
pub fn set(&mut self, path: T) { pub fn set(&mut self, path: T) {
self.skip = 0;
self.path = path; self.path = path;
self.skip = 0;
self.segments.clear(); self.segments.clear();
} }
@ -103,7 +108,7 @@ impl<T: ResourcePath> Path<T> {
pub(crate) fn add(&mut self, name: impl Into<Cow<'static, str>>, value: PathItem) { pub(crate) fn add(&mut self, name: impl Into<Cow<'static, str>>, value: PathItem) {
match value { match value {
PathItem::Static(s) => self.segments.push((name.into(), PathItem::Static(s))), PathItem::Static(seg) => self.segments.push((name.into(), PathItem::Static(seg))),
PathItem::Segment(begin, end) => self.segments.push(( PathItem::Segment(begin, end) => self.segments.push((
name.into(), name.into(),
PathItem::Segment(self.skip + begin, self.skip + end), PathItem::Segment(self.skip + begin, self.skip + end),
@ -149,15 +154,11 @@ impl<T: ResourcePath> Path<T> {
None None
} }
/// Get matched parameter by name. /// Returns matched parameter by name.
/// ///
/// If keyed parameter is not available empty string is used as default value. /// If keyed parameter is not available empty string is used as default value.
pub fn query(&self, key: &str) -> &str { pub fn query(&self, key: &str) -> &str {
if let Some(s) = self.get(key) { self.get(key).unwrap_or_default()
s
} else {
""
}
} }
/// Return iterator to items in parameter container. /// Return iterator to items in parameter container.
@ -168,9 +169,13 @@ impl<T: ResourcePath> Path<T> {
} }
} }
/// Try to deserialize matching parameters to a specified type `U` /// Deserializes matching parameters to a specified type `U`.
pub fn load<'de, U: serde::Deserialize<'de>>(&'de self) -> Result<U, de::value::Error> { ///
de::Deserialize::deserialize(PathDeserializer::new(self)) /// # Errors
///
/// Returns error when dynamic path segments cannot be deserialized into a `U` type.
pub fn load<'de, U: Deserialize<'de>>(&'de self) -> Result<U, de::value::Error> {
Deserialize::deserialize(PathDeserializer::new(self))
} }
} }
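As a quick illustration of the `query` and `load` helpers touched above, here is a rough sketch against actix-router 0.5's public API. It assumes `actix-router` and `serde` (with the `derive` feature) as dependencies; the `UserInfo` struct and route pattern are made up for the example, and `capture_match_info` is the intended way to populate the dynamic segments.

```rust
use actix_router::{Path, ResourceDef};
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct UserInfo {
    id: u32,
    name: String,
}

fn main() {
    let resource = ResourceDef::new("/users/{id}/{name}");
    let mut path = Path::new("/users/42/alice");

    // Populate dynamic segments from the matched resource pattern.
    assert!(resource.capture_match_info(&mut path));

    // `query` falls back to an empty string for unknown keys.
    assert_eq!(path.query("id"), "42");
    assert_eq!(path.query("missing"), "");

    // `load` deserializes the captured segments into a typed struct.
    let info: UserInfo = path.load().unwrap();
    assert_eq!(info.id, 42);
    assert_eq!(info.name, "alice");
}
```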

View file

@ -0,0 +1,66 @@
//! Abstraction over `regex` and `regex-lite` depending on whether we have `unicode` crate feature
//! enabled.
use cfg_if::cfg_if;
#[cfg(feature = "unicode")]
pub(crate) use regex::{escape, Regex};
#[cfg(not(feature = "unicode"))]
pub(crate) use regex_lite::{escape, Regex};
#[cfg(feature = "unicode")]
#[derive(Debug, Clone)]
pub(crate) struct RegexSet(regex::RegexSet);
#[cfg(not(feature = "unicode"))]
#[derive(Debug, Clone)]
pub(crate) struct RegexSet(Vec<regex_lite::Regex>);
impl RegexSet {
/// Create a new regex set.
///
/// # Panics
///
/// Panics if any path patterns are malformed.
pub(crate) fn new(re_set: Vec<String>) -> Self {
cfg_if! {
if #[cfg(feature = "unicode")] {
Self(regex::RegexSet::new(re_set).unwrap())
} else {
Self(re_set.iter().map(|re| Regex::new(re).unwrap()).collect())
}
}
}
/// Create a new empty regex set.
pub(crate) fn empty() -> Self {
cfg_if! {
if #[cfg(feature = "unicode")] {
Self(regex::RegexSet::empty())
} else {
Self(Vec::new())
}
}
}
/// Returns true if regex set matches `path`.
pub(crate) fn is_match(&self, path: &str) -> bool {
cfg_if! {
if #[cfg(feature = "unicode")] {
self.0.is_match(path)
} else {
self.0.iter().any(|re| re.is_match(path))
}
}
}
/// Returns index within `path` of first match.
pub(crate) fn first_match_idx(&self, path: &str) -> Option<usize> {
cfg_if! {
if #[cfg(feature = "unicode")] {
self.0.matches(path).into_iter().next()
} else {
Some(self.0.iter().enumerate().find(|(_, re)| re.is_match(path))?.0)
}
}
}
}
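To see why the two branches above are interchangeable, here is a small standalone comparison. It assumes both the `regex` and `regex-lite` crates are available as dependencies and does not use the crate-private `RegexSet` wrapper itself; the patterns are arbitrary examples.

```rust
fn main() {
    let patterns = vec![r"^/users/\d+$".to_owned(), r"^/posts/\w+$".to_owned()];

    // `unicode` feature enabled: one compiled regex::RegexSet answers the query.
    let set = regex::RegexSet::new(&patterns).unwrap();
    let full_match_idx = set.matches("/posts/hello").into_iter().next();

    // `unicode` feature disabled: a Vec of regex_lite::Regex, scanned in order.
    let lite: Vec<regex_lite::Regex> = patterns
        .iter()
        .map(|p| regex_lite::Regex::new(p).unwrap())
        .collect();
    let lite_match_idx = lite.iter().position(|re| re.is_match("/posts/hello"));

    // Both strategies report the same first-matching pattern index.
    assert_eq!(full_match_idx, lite_match_idx);
    assert_eq!(full_match_idx, Some(1));
}
```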

View file

@ -5,10 +5,13 @@ use std::{
mem, mem,
}; };
use regex::{escape, Regex, RegexSet};
use tracing::error; use tracing::error;
use crate::{path::PathItem, IntoPatterns, Patterns, Resource, ResourcePath}; use crate::{
path::PathItem,
regex_set::{escape, Regex, RegexSet},
IntoPatterns, Patterns, Resource, ResourcePath,
};
const MAX_DYNAMIC_SEGMENTS: usize = 16; const MAX_DYNAMIC_SEGMENTS: usize = 16;
@ -233,7 +236,7 @@ enum PatternSegment {
Var(String), Var(String),
} }
#[derive(Clone, Debug)] #[derive(Debug, Clone)]
#[allow(clippy::large_enum_variant)] #[allow(clippy::large_enum_variant)]
enum PatternType { enum PatternType {
/// Single constant/literal segment. /// Single constant/literal segment.
@ -603,7 +606,7 @@ impl ResourceDef {
PatternType::Dynamic(re, _) => Some(re.captures(path)?[1].len()), PatternType::Dynamic(re, _) => Some(re.captures(path)?[1].len()),
PatternType::DynamicSet(re, params) => { PatternType::DynamicSet(re, params) => {
let idx = re.matches(path).into_iter().next()?; let idx = re.first_match_idx(path)?;
let (ref pattern, _) = params[idx]; let (ref pattern, _) = params[idx];
Some(pattern.captures(path)?[1].len()) Some(pattern.captures(path)?[1].len())
} }
@ -706,7 +709,7 @@ impl ResourceDef {
PatternType::DynamicSet(re, params) => { PatternType::DynamicSet(re, params) => {
let path = path.unprocessed(); let path = path.unprocessed();
let (pattern, names) = match re.matches(path).into_iter().next() { let (pattern, names) = match re.first_match_idx(path) {
Some(idx) => &params[idx], Some(idx) => &params[idx],
_ => return false, _ => return false,
}; };
@ -870,7 +873,7 @@ impl ResourceDef {
} }
} }
let pattern_re_set = RegexSet::new(re_set).unwrap(); let pattern_re_set = RegexSet::new(re_set);
let segments = segments.unwrap_or_default(); let segments = segments.unwrap_or_default();
( (

View file

@ -2,6 +2,21 @@
## Unreleased ## Unreleased
## 0.1.5
- Add `TestServerConfig::listen_address()` method.
## 0.1.4
- Add `TestServerConfig::rustls_0_23()` method for Rustls v0.23 support behind new `rustls-0_23` crate feature.
- Add `TestServerConfig::disable_redirects()` method.
- Various types from `awc`, such as `ClientRequest` and `ClientResponse`, are now re-exported.
- Minimum supported Rust version (MSRV) is now 1.72.
## 0.1.3
- Add `TestServerConfig::rustls_0_22()` method for Rustls v0.22 support behind new `rustls-0_22` crate feature.
## 0.1.2 ## 0.1.2
- Add `TestServerConfig::rustls_021()` method for Rustls v0.21 support behind new `rustls-0_21` crate feature. - Add `TestServerConfig::rustls_021()` method for Rustls v0.21 support behind new `rustls-0_21` crate feature.

View file

@ -1,6 +1,6 @@
[package] [package]
name = "actix-test" name = "actix-test"
version = "0.1.2" version = "0.1.5"
authors = [ authors = [
"Nikolay Kim <fafhrd91@gmail.com>", "Nikolay Kim <fafhrd91@gmail.com>",
"Rob Ede <robjtede@icloud.com>", "Rob Ede <robjtede@icloud.com>",
@ -18,6 +18,22 @@ categories = [
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"
edition = "2021" edition = "2021"
[package.metadata.cargo_check_external_types]
allowed_external_types = [
"actix_codec::*",
"actix_http_test::*",
"actix_http::*",
"actix_service::*",
"actix_web::*",
"awc::*",
"bytes::*",
"futures_core::*",
"http::*",
"openssl::*",
"rustls::*",
"tokio::*",
]
[features] [features]
default = [] default = []
@ -27,19 +43,23 @@ rustls = ["rustls-0_20"]
rustls-0_20 = ["tls-rustls-0_20", "actix-http/rustls-0_20", "awc/rustls-0_20"] rustls-0_20 = ["tls-rustls-0_20", "actix-http/rustls-0_20", "awc/rustls-0_20"]
# TLS via Rustls v0.21 # TLS via Rustls v0.21
rustls-0_21 = ["tls-rustls-0_21", "actix-http/rustls-0_21", "awc/rustls-0_21"] rustls-0_21 = ["tls-rustls-0_21", "actix-http/rustls-0_21", "awc/rustls-0_21"]
# TLS via Rustls v0.22
rustls-0_22 = ["tls-rustls-0_22", "actix-http/rustls-0_22", "awc/rustls-0_22-webpki-roots"]
# TLS via Rustls v0.23
rustls-0_23 = ["tls-rustls-0_23", "actix-http/rustls-0_23", "awc/rustls-0_23-webpki-roots"]
# TLS via OpenSSL # TLS via OpenSSL
openssl = ["tls-openssl", "actix-http/openssl", "awc/openssl"] openssl = ["tls-openssl", "actix-http/openssl", "awc/openssl"]
[dependencies] [dependencies]
actix-codec = "0.5" actix-codec = "0.5"
actix-http = "3" actix-http = "3.7"
actix-http-test = "3" actix-http-test = "3"
actix-rt = "2.1" actix-rt = "2.1"
actix-service = "2" actix-service = "2"
actix-utils = "3" actix-utils = "3"
actix-web = { version = "4", default-features = false, features = ["cookies"] } actix-web = { version = "4.6", default-features = false, features = ["cookies"] }
awc = { version = "3", default-features = false, features = ["cookies"] } awc = { version = "3.5", default-features = false, features = ["cookies"] }
futures-core = { version = "0.3.17", default-features = false, features = ["std"] } futures-core = { version = "0.3.17", default-features = false, features = ["std"] }
futures-util = { version = "0.3.17", default-features = false, features = [] } futures-util = { version = "0.3.17", default-features = false, features = [] }
@ -50,4 +70,9 @@ serde_urlencoded = "0.7"
tls-openssl = { package = "openssl", version = "0.10.55", optional = true } tls-openssl = { package = "openssl", version = "0.10.55", optional = true }
tls-rustls-0_20 = { package = "rustls", version = "0.20", optional = true } tls-rustls-0_20 = { package = "rustls", version = "0.20", optional = true }
tls-rustls-0_21 = { package = "rustls", version = "0.21", optional = true } tls-rustls-0_21 = { package = "rustls", version = "0.21", optional = true }
tls-rustls-0_22 = { package = "rustls", version = "0.22", optional = true }
tls-rustls-0_23 = { package = "rustls", version = "0.23", default-features = false, optional = true }
tokio = { version = "1.24.2", features = ["sync"] } tokio = { version = "1.24.2", features = ["sync"] }
[lints]
workspace = true

actix-test/README.md (new file, 45 lines)
View file

@ -0,0 +1,45 @@
# `actix-test`
<!-- prettier-ignore-start -->
[![crates.io](https://img.shields.io/crates/v/actix-test?label=latest)](https://crates.io/crates/actix-test)
[![Documentation](https://docs.rs/actix-test/badge.svg?version=0.1.5)](https://docs.rs/actix-test/0.1.5)
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-test.svg)
<br />
[![dependency status](https://deps.rs/crate/actix-test/0.1.5/status.svg)](https://deps.rs/crate/actix-test/0.1.5)
[![Download](https://img.shields.io/crates/d/actix-test.svg)](https://crates.io/crates/actix-test)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
<!-- prettier-ignore-end -->
<!-- cargo-rdme start -->
Integration testing tools for Actix Web applications.
The main integration testing tool is [`TestServer`]. It spawns a real HTTP server on an unused port and provides methods that use a real HTTP client. Therefore, it is much closer to real-world cases than using `init_service`, which skips HTTP encoding and decoding.
## Examples
```rust
use actix_web::{get, web, test, App, HttpResponse, Error, Responder};
#[get("/")]
async fn my_handler() -> Result<impl Responder, Error> {
Ok(HttpResponse::Ok())
}
#[actix_rt::test]
async fn test_example() {
let srv = actix_test::start(||
App::new().service(my_handler)
);
let req = srv.get("/");
let res = req.send().await.unwrap();
assert!(res.status().is_success());
}
```
<!-- cargo-rdme end -->

View file

@ -5,6 +5,7 @@
//! real-world cases than using `init_service`, which skips HTTP encoding and decoding. //! real-world cases than using `init_service`, which skips HTTP encoding and decoding.
//! //!
//! # Examples //! # Examples
//!
//! ``` //! ```
//! use actix_web::{get, web, test, App, HttpResponse, Error, Responder}; //! use actix_web::{get, web, test, App, HttpResponse, Error, Responder};
//! //!
@ -26,8 +27,6 @@
//! } //! }
//! ``` //! ```
#![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
#![doc(html_logo_url = "https://actix.rs/img/logo.png")] #![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")] #![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))] #![cfg_attr(docsrs, feature(doc_auto_cfg))]
@ -52,7 +51,7 @@ use actix_web::{
rt::{self, System}, rt::{self, System},
web, Error, web, Error,
}; };
use awc::{error::PayloadError, Client, ClientRequest, ClientResponse, Connector}; pub use awc::{error::PayloadError, Client, ClientRequest, ClientResponse, Connector};
use futures_core::Stream; use futures_core::Stream;
use tokio::sync::mpsc; use tokio::sync::mpsc;
@ -143,12 +142,18 @@ where
StreamType::Rustls020(_) => true, StreamType::Rustls020(_) => true,
#[cfg(feature = "rustls-0_21")] #[cfg(feature = "rustls-0_21")]
StreamType::Rustls021(_) => true, StreamType::Rustls021(_) => true,
#[cfg(feature = "rustls-0_22")]
StreamType::Rustls022(_) => true,
#[cfg(feature = "rustls-0_23")]
StreamType::Rustls023(_) => true,
}; };
let client_cfg = cfg.clone();
// run server in separate orphaned thread // run server in separate orphaned thread
thread::spawn(move || { thread::spawn(move || {
rt::System::new().block_on(async move { rt::System::new().block_on(async move {
let tcp = net::TcpListener::bind(("127.0.0.1", cfg.port)).unwrap(); let tcp = net::TcpListener::bind((cfg.listen_address.clone(), cfg.port)).unwrap();
let local_addr = tcp.local_addr().unwrap(); let local_addr = tcp.local_addr().unwrap();
let factory = factory.clone(); let factory = factory.clone();
let srv_cfg = cfg.clone(); let srv_cfg = cfg.clone();
@ -327,6 +332,90 @@ where
.rustls_021(config.clone()) .rustls_021(config.clone())
}), }),
}, },
#[cfg(feature = "rustls-0_22")]
StreamType::Rustls022(config) => match cfg.tp {
HttpVer::Http1 => builder.listen("test", tcp, move || {
let app_cfg =
AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);
let fac = factory()
.into_factory()
.map_err(|err| err.into().error_response());
HttpService::build()
.client_request_timeout(timeout)
.h1(map_config(fac, move |_| app_cfg.clone()))
.rustls_0_22(config.clone())
}),
HttpVer::Http2 => builder.listen("test", tcp, move || {
let app_cfg =
AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);
let fac = factory()
.into_factory()
.map_err(|err| err.into().error_response());
HttpService::build()
.client_request_timeout(timeout)
.h2(map_config(fac, move |_| app_cfg.clone()))
.rustls_0_22(config.clone())
}),
HttpVer::Both => builder.listen("test", tcp, move || {
let app_cfg =
AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);
let fac = factory()
.into_factory()
.map_err(|err| err.into().error_response());
HttpService::build()
.client_request_timeout(timeout)
.finish(map_config(fac, move |_| app_cfg.clone()))
.rustls_0_22(config.clone())
}),
},
#[cfg(feature = "rustls-0_23")]
StreamType::Rustls023(config) => match cfg.tp {
HttpVer::Http1 => builder.listen("test", tcp, move || {
let app_cfg =
AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);
let fac = factory()
.into_factory()
.map_err(|err| err.into().error_response());
HttpService::build()
.client_request_timeout(timeout)
.h1(map_config(fac, move |_| app_cfg.clone()))
.rustls_0_23(config.clone())
}),
HttpVer::Http2 => builder.listen("test", tcp, move || {
let app_cfg =
AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);
let fac = factory()
.into_factory()
.map_err(|err| err.into().error_response());
HttpService::build()
.client_request_timeout(timeout)
.h2(map_config(fac, move |_| app_cfg.clone()))
.rustls_0_23(config.clone())
}),
HttpVer::Both => builder.listen("test", tcp, move || {
let app_cfg =
AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);
let fac = factory()
.into_factory()
.map_err(|err| err.into().error_response());
HttpService::build()
.client_request_timeout(timeout)
.finish(map_config(fac, move |_| app_cfg.clone()))
.rustls_0_23(config.clone())
}),
},
} }
.expect("test server could not be created"); .expect("test server could not be created");
@ -372,7 +461,13 @@ where
} }
}; };
Client::builder().connector(connector).finish() let mut client_builder = Client::builder().connector(connector);
if client_cfg.disable_redirects {
client_builder = client_builder.disable_redirects();
}
client_builder.finish()
}; };
TestServer { TestServer {
@ -392,6 +487,7 @@ enum HttpVer {
Both, Both,
} }
#[allow(clippy::large_enum_variant)]
#[derive(Clone)] #[derive(Clone)]
enum StreamType { enum StreamType {
Tcp, Tcp,
@ -401,6 +497,10 @@ enum StreamType {
Rustls020(tls_rustls_0_20::ServerConfig), Rustls020(tls_rustls_0_20::ServerConfig),
#[cfg(feature = "rustls-0_21")] #[cfg(feature = "rustls-0_21")]
Rustls021(tls_rustls_0_21::ServerConfig), Rustls021(tls_rustls_0_21::ServerConfig),
#[cfg(feature = "rustls-0_22")]
Rustls022(tls_rustls_0_22::ServerConfig),
#[cfg(feature = "rustls-0_23")]
Rustls023(tls_rustls_0_23::ServerConfig),
} }
/// Create default test server config. /// Create default test server config.
@ -413,8 +513,10 @@ pub struct TestServerConfig {
tp: HttpVer, tp: HttpVer,
stream: StreamType, stream: StreamType,
client_request_timeout: Duration, client_request_timeout: Duration,
listen_address: String,
port: u16, port: u16,
workers: usize, workers: usize,
disable_redirects: bool,
} }
impl Default for TestServerConfig { impl Default for TestServerConfig {
@ -424,56 +526,96 @@ impl Default for TestServerConfig {
} }
impl TestServerConfig { impl TestServerConfig {
/// Create default server configuration /// Constructs default server configuration.
pub(crate) fn new() -> TestServerConfig { pub(crate) fn new() -> TestServerConfig {
TestServerConfig { TestServerConfig {
tp: HttpVer::Both, tp: HttpVer::Both,
stream: StreamType::Tcp, stream: StreamType::Tcp,
client_request_timeout: Duration::from_secs(5), client_request_timeout: Duration::from_secs(5),
listen_address: "127.0.0.1".to_string(),
port: 0, port: 0,
workers: 1, workers: 1,
disable_redirects: false,
} }
} }
/// Accept HTTP/1.1 only. /// Accepts HTTP/1.1 only.
pub fn h1(mut self) -> Self { pub fn h1(mut self) -> Self {
self.tp = HttpVer::Http1; self.tp = HttpVer::Http1;
self self
} }
/// Accept HTTP/2 only. /// Accepts HTTP/2 only.
pub fn h2(mut self) -> Self { pub fn h2(mut self) -> Self {
self.tp = HttpVer::Http2; self.tp = HttpVer::Http2;
self self
} }
/// Accept secure connections via OpenSSL. /// Accepts secure connections via OpenSSL.
#[cfg(feature = "openssl")] #[cfg(feature = "openssl")]
pub fn openssl(mut self, acceptor: openssl::ssl::SslAcceptor) -> Self { pub fn openssl(mut self, acceptor: openssl::ssl::SslAcceptor) -> Self {
self.stream = StreamType::Openssl(acceptor); self.stream = StreamType::Openssl(acceptor);
self self
} }
/// Accept secure connections via Rustls. #[doc(hidden)]
#[deprecated(note = "Renamed to `rustls_0_20()`.")]
#[cfg(feature = "rustls-0_20")] #[cfg(feature = "rustls-0_20")]
pub fn rustls(mut self, config: tls_rustls_0_20::ServerConfig) -> Self { pub fn rustls(mut self, config: tls_rustls_0_20::ServerConfig) -> Self {
self.stream = StreamType::Rustls020(config); self.stream = StreamType::Rustls020(config);
self self
} }
/// Accept secure connections via Rustls. /// Accepts secure connections via Rustls v0.20.
#[cfg(feature = "rustls-0_20")]
pub fn rustls_0_20(mut self, config: tls_rustls_0_20::ServerConfig) -> Self {
self.stream = StreamType::Rustls020(config);
self
}
#[doc(hidden)]
#[deprecated(note = "Renamed to `rustls_0_21()`.")]
#[cfg(feature = "rustls-0_21")] #[cfg(feature = "rustls-0_21")]
pub fn rustls_021(mut self, config: tls_rustls_0_21::ServerConfig) -> Self { pub fn rustls_021(mut self, config: tls_rustls_0_21::ServerConfig) -> Self {
self.stream = StreamType::Rustls021(config); self.stream = StreamType::Rustls021(config);
self self
} }
/// Set client timeout for first request. /// Accepts secure connections via Rustls v0.21.
#[cfg(feature = "rustls-0_21")]
pub fn rustls_0_21(mut self, config: tls_rustls_0_21::ServerConfig) -> Self {
self.stream = StreamType::Rustls021(config);
self
}
/// Accepts secure connections via Rustls v0.22.
#[cfg(feature = "rustls-0_22")]
pub fn rustls_0_22(mut self, config: tls_rustls_0_22::ServerConfig) -> Self {
self.stream = StreamType::Rustls022(config);
self
}
/// Accepts secure connections via Rustls v0.23.
#[cfg(feature = "rustls-0_23")]
pub fn rustls_0_23(mut self, config: tls_rustls_0_23::ServerConfig) -> Self {
self.stream = StreamType::Rustls023(config);
self
}
/// Sets client timeout for first request.
pub fn client_request_timeout(mut self, dur: Duration) -> Self { pub fn client_request_timeout(mut self, dur: Duration) -> Self {
self.client_request_timeout = dur; self.client_request_timeout = dur;
self self
} }
/// Sets the address the server will listen on.
///
/// By default, only listens on `127.0.0.1`.
pub fn listen_address(mut self, addr: impl Into<String>) -> Self {
self.listen_address = addr.into();
self
}
/// Sets test server port. /// Sets test server port.
/// ///
/// By default, a random free port is determined by the OS. /// By default, a random free port is determined by the OS.
@ -489,6 +631,15 @@ impl TestServerConfig {
self.workers = workers; self.workers = workers;
self self
} }
/// Instructs the client not to follow redirects.
///
/// By default, the client will follow up to 10 consecutive redirects
/// before giving up.
pub fn disable_redirects(mut self) -> Self {
self.disable_redirects = true;
self
}
} }
/// A basic HTTP server controller that simplifies the process of writing integration tests for /// A basic HTTP server controller that simplifies the process of writing integration tests for
@ -515,9 +666,9 @@ impl TestServer {
let scheme = if self.tls { "https" } else { "http" }; let scheme = if self.tls { "https" } else { "http" };
if uri.starts_with('/') { if uri.starts_with('/') {
format!("{}://localhost:{}{}", scheme, self.addr.port(), uri) format!("{}://{}{}", scheme, self.addr, uri)
} else { } else {
format!("{}://localhost:{}/{}", scheme, self.addr.port(), uri) format!("{}://{}/{}", scheme, self.addr, uri)
} }
} }
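A hedged usage sketch for the new `listen_address()` and `disable_redirects()` options added above. It assumes the usual `actix-web` and `actix-test` dev-dependencies; the handler and test names are illustrative only.

```rust
use actix_web::{get, App, HttpResponse, Responder};

#[get("/")]
async fn index() -> impl Responder {
    HttpResponse::Ok().body("hello")
}

#[actix_web::test]
async fn serves_on_configured_listen_address() {
    // `config()` and `start_with()` come from the `actix-test` crate.
    let srv = actix_test::start_with(
        actix_test::config()
            // bind explicitly instead of relying on the default 127.0.0.1
            .listen_address("127.0.0.1")
            // hand 3xx responses back to the test instead of following them
            .disable_redirects(),
        || App::new().service(index),
    );

    let res = srv.get("/").send().await.unwrap();
    assert!(res.status().is_success());
}
```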

View file

@ -2,6 +2,14 @@
## Unreleased ## Unreleased
## 4.3.1 <!-- v4.3.1+deprecated -->
- Reduce memory usage by `take`-ing (rather than `split`-ing) the encoded buffer when yielding bytes in the response stream.
- Mark crate as deprecated.
- Minimum supported Rust version (MSRV) is now 1.72.
## 4.3.0
- Minimum supported Rust version (MSRV) is now 1.68 due to transitive `time` dependency. - Minimum supported Rust version (MSRV) is now 1.68 due to transitive `time` dependency.
## 4.2.0 ## 4.2.0

View file

@ -1,17 +1,24 @@
[package] [package]
name = "actix-web-actors" name = "actix-web-actors"
version = "4.2.0" version = "4.3.1+deprecated"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"] authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Actix actors support for Actix Web" description = "Actix actors support for Actix Web"
keywords = ["actix", "http", "web", "framework", "async"] keywords = ["actix", "http", "web", "framework", "async"]
homepage = "https://actix.rs" homepage.workspace = true
repository = "https://github.com/actix/actix-web" repository.workspace = true
license = "MIT OR Apache-2.0" license.workspace = true
edition = "2021" edition.workspace = true
rust-version.workspace = true
[lib] [package.metadata.cargo_check_external_types]
name = "actix_web_actors" allowed_external_types = [
path = "src/lib.rs" "actix::*",
"actix_http::*",
"actix_web::*",
"bytes::*",
"bytestring::*",
"futures_core::*",
]
[dependencies] [dependencies]
actix = { version = ">=0.12, <0.14", default-features = false } actix = { version = ">=0.12, <0.14", default-features = false }
@ -32,6 +39,9 @@ actix-test = "0.1"
awc = { version = "3", default-features = false } awc = { version = "3", default-features = false }
actix-web = { version = "4", features = ["macros"] } actix-web = { version = "4", features = ["macros"] }
env_logger = "0.10" env_logger = "0.11"
futures-util = { version = "0.3.17", default-features = false, features = ["std"] } futures-util = { version = "0.3.17", default-features = false, features = ["std"] }
mime = "0.3" mime = "0.3"
[lints]
workspace = true

View file

@ -1,17 +1,18 @@
# actix-web-actors # `actix-web-actors`
> Actix actors support for Actix Web. > Actix actors support for Actix Web.
>
> This crate is deprecated. Migrate to [`actix-ws`](https://crates.io/crates/actix-ws).
<!-- prettier-ignore-start -->
[![crates.io](https://img.shields.io/crates/v/actix-web-actors?label=latest)](https://crates.io/crates/actix-web-actors) [![crates.io](https://img.shields.io/crates/v/actix-web-actors?label=latest)](https://crates.io/crates/actix-web-actors)
[![Documentation](https://docs.rs/actix-web-actors/badge.svg?version=4.2.0)](https://docs.rs/actix-web-actors/4.2.0) [![Documentation](https://docs.rs/actix-web-actors/badge.svg?version=4.3.1)](https://docs.rs/actix-web-actors/4.3.1)
![Version](https://img.shields.io/badge/rustc-1.68+-ab6000.svg) ![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
![License](https://img.shields.io/crates/l/actix-web-actors.svg) ![License](https://img.shields.io/crates/l/actix-web-actors.svg)
<br /> <br />
[![dependency status](https://deps.rs/crate/actix-web-actors/4.2.0/status.svg)](https://deps.rs/crate/actix-web-actors/4.2.0) ![maintenance-status](https://img.shields.io/badge/maintenance-deprecated-red.svg)
[![Download](https://img.shields.io/crates/d/actix-web-actors.svg)](https://crates.io/crates/actix-web-actors) [![Download](https://img.shields.io/crates/d/actix-web-actors.svg)](https://crates.io/crates/actix-web-actors)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
## Documentation & Resources <!-- prettier-ignore-end -->
- [API Documentation](https://docs.rs/actix-web-actors)
- Minimum Supported Rust Version (MSRV): 1.68

View file

@ -248,13 +248,11 @@ where
mod tests { mod tests {
use std::time::Duration; use std::time::Duration;
use actix::Actor;
use actix_web::{ use actix_web::{
http::StatusCode, http::StatusCode,
test::{call_service, init_service, read_body, TestRequest}, test::{call_service, init_service, read_body, TestRequest},
web, App, HttpResponse, web, App, HttpResponse,
}; };
use bytes::Bytes;
use super::*; use super::*;

View file

@ -1,5 +1,7 @@
//! Actix actors support for Actix Web. //! Actix actors support for Actix Web.
//! //!
//! This crate is deprecated. Migrate to [`actix-ws`](https://crates.io/crates/actix-ws).
//!
//! # Examples //! # Examples
//! //!
//! ```no_run //! ```no_run
@ -55,8 +57,6 @@
//! * [`HttpContext`]: This struct provides actor support for streaming HTTP responses. //! * [`HttpContext`]: This struct provides actor support for streaming HTTP responses.
//! //!
#![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
#![doc(html_logo_url = "https://actix.rs/img/logo.png")] #![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")] #![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))] #![cfg_attr(docsrs, feature(doc_auto_cfg))]

View file

@ -710,7 +710,7 @@ where
} }
if !this.buf.is_empty() { if !this.buf.is_empty() {
Poll::Ready(Some(Ok(this.buf.split().freeze()))) Poll::Ready(Some(Ok(std::mem::take(&mut this.buf).freeze())))
} else if this.fut.alive() && !this.closed { } else if this.fut.alive() && !this.closed {
Poll::Pending Poll::Pending
} else { } else {
@ -817,10 +817,7 @@ where
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use actix_web::{ use actix_web::test::TestRequest;
http::{header, Method},
test::TestRequest,
};
use super::*; use super::*;

View file

@ -2,6 +2,13 @@
## Unreleased ## Unreleased
## 4.3.0
- Add `#[scope]` macro.
- Add `compat-routing-macros-force-pub` crate feature (on-by-default) which, when disabled, causes handlers to inherit their attached function's visibility.
- Prevent inclusion of default `actix-router` features.
- Minimum supported Rust version (MSRV) is now 1.72.
## 4.2.2 ## 4.2.2
- Fix regression when declaring `wrap` attribute using an expression. - Fix regression when declaring `wrap` attribute using an expression.

View file

@ -1,21 +1,26 @@
[package] [package]
name = "actix-web-codegen" name = "actix-web-codegen"
version = "4.2.2" version = "4.3.0"
description = "Routing and runtime macros for Actix Web" description = "Routing and runtime macros for Actix Web"
homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web"
authors = [ authors = [
"Nikolay Kim <fafhrd91@gmail.com>", "Nikolay Kim <fafhrd91@gmail.com>",
"Rob Ede <robjtede@icloud.com>", "Rob Ede <robjtede@icloud.com>",
] ]
license = "MIT OR Apache-2.0" homepage.workspace = true
edition = "2021" repository.workspace = true
license.workspace = true
edition.workspace = true
rust-version.workspace = true
[lib] [lib]
proc-macro = true proc-macro = true
[features]
default = ["compat-routing-macros-force-pub"]
compat-routing-macros-force-pub = []
[dependencies] [dependencies]
actix-router = "0.5" actix-router = { version = "0.5", default-features = false }
proc-macro2 = "1" proc-macro2 = "1"
quote = "1" quote = "1"
syn = { version = "2", features = ["full", "extra-traits"] } syn = { version = "2", features = ["full", "extra-traits"] }
@ -30,3 +35,6 @@ actix-web = "4"
futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] } futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
trybuild = "1" trybuild = "1"
rustversion = "1" rustversion = "1"
[lints]
workspace = true

View file

@ -1,20 +1,19 @@
# actix-web-codegen # `actix-web-codegen`
> Routing and runtime macros for Actix Web. > Routing and runtime macros for Actix Web.
<!-- prettier-ignore-start -->
[![crates.io](https://img.shields.io/crates/v/actix-web-codegen?label=latest)](https://crates.io/crates/actix-web-codegen) [![crates.io](https://img.shields.io/crates/v/actix-web-codegen?label=latest)](https://crates.io/crates/actix-web-codegen)
[![Documentation](https://docs.rs/actix-web-codegen/badge.svg?version=4.2.2)](https://docs.rs/actix-web-codegen/4.2.2) [![Documentation](https://docs.rs/actix-web-codegen/badge.svg?version=4.3.0)](https://docs.rs/actix-web-codegen/4.3.0)
![Version](https://img.shields.io/badge/rustc-1.68+-ab6000.svg) ![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
![License](https://img.shields.io/crates/l/actix-web-codegen.svg) ![License](https://img.shields.io/crates/l/actix-web-codegen.svg)
<br /> <br />
[![dependency status](https://deps.rs/crate/actix-web-codegen/4.2.2/status.svg)](https://deps.rs/crate/actix-web-codegen/4.2.2) [![dependency status](https://deps.rs/crate/actix-web-codegen/4.3.0/status.svg)](https://deps.rs/crate/actix-web-codegen/4.3.0)
[![Download](https://img.shields.io/crates/d/actix-web-codegen.svg)](https://crates.io/crates/actix-web-codegen) [![Download](https://img.shields.io/crates/d/actix-web-codegen.svg)](https://crates.io/crates/actix-web-codegen)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
## Documentation & Resources <!-- prettier-ignore-end -->
- [API Documentation](https://docs.rs/actix-web-codegen)
- Minimum Supported Rust Version (MSRV): 1.68
## Compile Testing ## Compile Testing

View file

@ -73,8 +73,6 @@
//! [DELETE]: macro@delete //! [DELETE]: macro@delete
#![recursion_limit = "512"] #![recursion_limit = "512"]
#![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
#![doc(html_logo_url = "https://actix.rs/img/logo.png")] #![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")] #![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))] #![cfg_attr(docsrs, feature(doc_auto_cfg))]
@ -83,6 +81,7 @@ use proc_macro::TokenStream;
use quote::quote; use quote::quote;
mod route; mod route;
mod scope;
/// Creates resource handler, allowing multiple HTTP method guards. /// Creates resource handler, allowing multiple HTTP method guards.
/// ///
@ -197,6 +196,43 @@ method_macro!(Options, options);
method_macro!(Trace, trace); method_macro!(Trace, trace);
method_macro!(Patch, patch); method_macro!(Patch, patch);
/// Prepends a path prefix to all handlers using routing macros inside the attached module.
///
/// # Syntax
///
/// ```
/// # use actix_web_codegen::scope;
/// #[scope("/prefix")]
/// mod api {
/// // ...
/// }
/// ```
///
/// # Arguments
///
/// - `"/prefix"` - Raw literal string to be prefixed onto contained handlers' paths.
///
/// # Example
///
/// ```
/// # use actix_web_codegen::{scope, get};
/// # use actix_web::Responder;
/// #[scope("/api")]
/// mod api {
/// # use super::*;
/// #[get("/hello")]
/// pub async fn hello() -> impl Responder {
/// // this has path /api/hello
/// "Hello, world!"
/// }
/// }
/// # fn main() {}
/// ```
#[proc_macro_attribute]
pub fn scope(args: TokenStream, input: TokenStream) -> TokenStream {
scope::with_scope(args, input)
}
/// Marks async main function as the Actix Web system entry-point. /// Marks async main function as the Actix Web system entry-point.
/// ///
/// Note that Actix Web also works under `#[tokio::main]` since version 4.0. However, this macro is /// Note that Actix Web also works under `#[tokio::main]` since version 4.0. However, this macro is
@ -240,3 +276,15 @@ pub fn test(_: TokenStream, item: TokenStream) -> TokenStream {
output.extend(item); output.extend(item);
output output
} }
/// Converts the error to a token stream and appends it to the original input.
///
/// Returning the original input in addition to the error is good for IDEs which can gracefully
/// recover and show more precise errors within the macro body.
///
/// See <https://github.com/rust-analyzer/rust-analyzer/issues/10468> for more info.
fn input_and_compile_error(mut item: TokenStream, err: syn::Error) -> TokenStream {
let compile_err = TokenStream::from(err.to_compile_error());
item.extend(compile_err);
item
}

View file

@ -6,10 +6,12 @@ use proc_macro2::{Span, TokenStream as TokenStream2};
use quote::{quote, ToTokens, TokenStreamExt}; use quote::{quote, ToTokens, TokenStreamExt};
use syn::{punctuated::Punctuated, Ident, LitStr, Path, Token}; use syn::{punctuated::Punctuated, Ident, LitStr, Path, Token};
use crate::input_and_compile_error;
#[derive(Debug)] #[derive(Debug)]
pub struct RouteArgs { pub struct RouteArgs {
path: syn::LitStr, pub(crate) path: syn::LitStr,
options: Punctuated<syn::MetaNameValue, Token![,]>, pub(crate) options: Punctuated<syn::MetaNameValue, Token![,]>,
} }
impl syn::parse::Parse for RouteArgs { impl syn::parse::Parse for RouteArgs {
@ -78,7 +80,7 @@ macro_rules! standard_method_type {
} }
} }
fn from_path(method: &Path) -> Result<Self, ()> { pub(crate) fn from_path(method: &Path) -> Result<Self, ()> {
match () { match () {
$(_ if method.is_ident(stringify!($lower)) => Ok(Self::$variant),)+ $(_ if method.is_ident(stringify!($lower)) => Ok(Self::$variant),)+
_ => Err(()), _ => Err(()),
@ -411,6 +413,13 @@ impl ToTokens for Route {
doc_attributes, doc_attributes,
} = self; } = self;
#[allow(unused_variables)] // used when force-pub feature is disabled
let vis = &ast.vis;
// TODO(breaking): remove this force-pub forwards-compatibility feature
#[cfg(feature = "compat-routing-macros-force-pub")]
let vis = syn::Visibility::Public(<Token![pub]>::default());
let registrations: TokenStream2 = args let registrations: TokenStream2 = args
.iter() .iter()
.map(|args| { .map(|args| {
@ -458,7 +467,7 @@ impl ToTokens for Route {
let stream = quote! { let stream = quote! {
#(#doc_attributes)* #(#doc_attributes)*
#[allow(non_camel_case_types, missing_docs)] #[allow(non_camel_case_types, missing_docs)]
pub struct #name; #vis struct #name;
impl ::actix_web::dev::HttpServiceFactory for #name { impl ::actix_web::dev::HttpServiceFactory for #name {
fn register(self, __config: &mut actix_web::dev::AppService) { fn register(self, __config: &mut actix_web::dev::AppService) {
@ -542,15 +551,3 @@ pub(crate) fn with_methods(input: TokenStream) -> TokenStream {
Err(err) => input_and_compile_error(input, err), Err(err) => input_and_compile_error(input, err),
} }
} }
/// Converts the error to a token stream and appends it to the original input.
///
/// Returning the original input in addition to the error is good for IDEs which can gracefully
/// recover and show more precise errors within the macro body.
///
/// See <https://github.com/rust-analyzer/rust-analyzer/issues/10468> for more info.
fn input_and_compile_error(mut item: TokenStream, err: syn::Error) -> TokenStream {
let compile_err = TokenStream::from(err.to_compile_error());
item.extend(compile_err);
item
}

View file

@ -0,0 +1,103 @@
use proc_macro::TokenStream;
use proc_macro2::{Span, TokenStream as TokenStream2};
use quote::{quote, ToTokens as _};
use crate::{
input_and_compile_error,
route::{MethodType, RouteArgs},
};
pub fn with_scope(args: TokenStream, input: TokenStream) -> TokenStream {
match with_scope_inner(args, input.clone()) {
Ok(stream) => stream,
Err(err) => input_and_compile_error(input, err),
}
}
fn with_scope_inner(args: TokenStream, input: TokenStream) -> syn::Result<TokenStream> {
if args.is_empty() {
return Err(syn::Error::new(
Span::call_site(),
"missing arguments for scope macro, expected: #[scope(\"/prefix\")]",
));
}
let scope_prefix = syn::parse::<syn::LitStr>(args.clone()).map_err(|err| {
syn::Error::new(
err.span(),
"argument to scope macro is not a string literal, expected: #[scope(\"/prefix\")]",
)
})?;
let scope_prefix_value = scope_prefix.value();
if scope_prefix_value.ends_with('/') {
// trailing slashes cause non-obvious problems;
// it's better to point them out to developers rather than let them pass silently
return Err(syn::Error::new(
scope_prefix.span(),
"scopes should not have trailing slashes; see https://docs.rs/actix-web/4/actix_web/struct.Scope.html#avoid-trailing-slashes",
));
}
let mut module = syn::parse::<syn::ItemMod>(input).map_err(|err| {
syn::Error::new(err.span(), "#[scope] macro must be attached to a module")
})?;
// modify any routing macros (method or route[s]) attached to
// functions by prefixing them with this scope macro's argument
if let Some((_, items)) = &mut module.content {
for item in items {
if let syn::Item::Fn(fun) = item {
fun.attrs = fun
.attrs
.iter()
.map(|attr| modify_attribute_with_scope(attr, &scope_prefix_value))
.collect();
}
}
}
Ok(module.to_token_stream().into())
}
/// Checks if the attribute is a method type and has a route path, then modifies it.
fn modify_attribute_with_scope(attr: &syn::Attribute, scope_path: &str) -> syn::Attribute {
match (attr.parse_args::<RouteArgs>(), attr.clone().meta) {
(Ok(route_args), syn::Meta::List(meta_list)) if has_allowed_methods_in_scope(attr) => {
let modified_path = format!("{}{}", scope_path, route_args.path.value());
let options_tokens: Vec<TokenStream2> = route_args
.options
.iter()
.map(|option| {
quote! { ,#option }
})
.collect();
let combined_options_tokens: TokenStream2 =
options_tokens
.into_iter()
.fold(TokenStream2::new(), |mut acc, ts| {
acc.extend(std::iter::once(ts));
acc
});
syn::Attribute {
meta: syn::Meta::List(syn::MetaList {
tokens: quote! { #modified_path #combined_options_tokens },
..meta_list.clone()
}),
..attr.clone()
}
}
_ => attr.clone(),
}
}
fn has_allowed_methods_in_scope(attr: &syn::Attribute) -> bool {
MethodType::from_path(attr.path()).is_ok()
|| attr.path().is_ident("route")
|| attr.path().is_ident("ROUTE")
}

View file

@ -145,7 +145,7 @@ async fn custom_resource_name_test<'a>(req: HttpRequest) -> impl Responder {
mod guard_module { mod guard_module {
use actix_web::{guard::GuardContext, http::header}; use actix_web::{guard::GuardContext, http::header};
pub fn guard(ctx: &GuardContext) -> bool { pub fn guard(ctx: &GuardContext<'_>) -> bool {
ctx.header::<header::Accept>() ctx.header::<header::Accept>()
.map(|h| h.preference() == "image/*") .map(|h| h.preference() == "image/*")
.unwrap_or(false) .unwrap_or(false)

View file

@ -0,0 +1,200 @@
use actix_web::{guard::GuardContext, http, http::header, web, App, HttpResponse, Responder};
use actix_web_codegen::{delete, get, post, route, routes, scope};
pub fn image_guard(ctx: &GuardContext<'_>) -> bool {
ctx.header::<header::Accept>()
.map(|h| h.preference() == "image/*")
.unwrap_or(false)
}
#[scope("/test")]
mod scope_module {
// ensure that imports can be brought into the scope
use super::*;
#[get("/test/guard", guard = "image_guard")]
pub async fn guard() -> impl Responder {
HttpResponse::Ok()
}
#[get("/test")]
pub async fn test() -> impl Responder {
HttpResponse::Ok().finish()
}
#[get("/twice-test/{value}")]
pub async fn twice(value: web::Path<String>) -> impl actix_web::Responder {
let int_value: i32 = value.parse().unwrap_or(0);
let doubled = int_value * 2;
HttpResponse::Ok().body(format!("Twice value: {}", doubled))
}
#[post("/test")]
pub async fn post() -> impl Responder {
HttpResponse::Ok().body("post works")
}
#[delete("/test")]
pub async fn delete() -> impl Responder {
"delete works"
}
#[route("/test", method = "PUT", method = "PATCH", method = "CUSTOM")]
pub async fn multiple_shared_path() -> impl Responder {
HttpResponse::Ok().finish()
}
#[routes]
#[head("/test1")]
#[connect("/test2")]
#[options("/test3")]
#[trace("/test4")]
pub async fn multiple_separate_paths() -> impl Responder {
HttpResponse::Ok().finish()
}
// test calling this from other mod scope with scope attribute...
pub fn mod_common(message: String) -> impl actix_web::Responder {
HttpResponse::Ok().body(message)
}
}
/// Scope doc string to check in cargo expand.
#[scope("/v1")]
mod mod_scope_v1 {
use super::*;
/// Route doc string to check in cargo expand.
#[get("/test")]
pub async fn test() -> impl Responder {
scope_module::mod_common("version1 works".to_string())
}
}
#[scope("/v2")]
mod mod_scope_v2 {
use super::*;
// check to make sure non-function tokens in the scope block are preserved...
enum TestEnum {
Works,
}
#[get("/test")]
pub async fn test() -> impl Responder {
// make sure this type still exists...
let test_enum = TestEnum::Works;
match test_enum {
TestEnum::Works => scope_module::mod_common("version2 works".to_string()),
}
}
}
#[actix_rt::test]
async fn scope_get_async() {
let srv = actix_test::start(|| App::new().service(scope_module::test));
let request = srv.request(http::Method::GET, srv.url("/test/test"));
let response = request.send().await.unwrap();
assert!(response.status().is_success());
}
#[actix_rt::test]
async fn scope_get_param_async() {
let srv = actix_test::start(|| App::new().service(scope_module::twice));
let request = srv.request(http::Method::GET, srv.url("/test/twice-test/4"));
let mut response = request.send().await.unwrap();
let body = response.body().await.unwrap();
let body_str = String::from_utf8(body.to_vec()).unwrap();
assert_eq!(body_str, "Twice value: 8");
}
#[actix_rt::test]
async fn scope_post_async() {
let srv = actix_test::start(|| App::new().service(scope_module::post));
let request = srv.request(http::Method::POST, srv.url("/test/test"));
let mut response = request.send().await.unwrap();
let body = response.body().await.unwrap();
let body_str = String::from_utf8(body.to_vec()).unwrap();
assert_eq!(body_str, "post works");
}
#[actix_rt::test]
async fn multiple_shared_path_async() {
let srv = actix_test::start(|| App::new().service(scope_module::multiple_shared_path));
let request = srv.request(http::Method::PUT, srv.url("/test/test"));
let response = request.send().await.unwrap();
assert!(response.status().is_success());
let request = srv.request(http::Method::PATCH, srv.url("/test/test"));
let response = request.send().await.unwrap();
assert!(response.status().is_success());
}
#[actix_rt::test]
async fn multiple_multi_path_async() {
let srv = actix_test::start(|| App::new().service(scope_module::multiple_separate_paths));
let request = srv.request(http::Method::HEAD, srv.url("/test/test1"));
let response = request.send().await.unwrap();
assert!(response.status().is_success());
let request = srv.request(http::Method::CONNECT, srv.url("/test/test2"));
let response = request.send().await.unwrap();
assert!(response.status().is_success());
let request = srv.request(http::Method::OPTIONS, srv.url("/test/test3"));
let response = request.send().await.unwrap();
assert!(response.status().is_success());
let request = srv.request(http::Method::TRACE, srv.url("/test/test4"));
let response = request.send().await.unwrap();
assert!(response.status().is_success());
}
#[actix_rt::test]
async fn scope_delete_async() {
let srv = actix_test::start(|| App::new().service(scope_module::delete));
let request = srv.request(http::Method::DELETE, srv.url("/test/test"));
let mut response = request.send().await.unwrap();
let body = response.body().await.unwrap();
let body_str = String::from_utf8(body.to_vec()).unwrap();
assert_eq!(body_str, "delete works");
}
#[actix_rt::test]
async fn scope_get_with_guard_async() {
let srv = actix_test::start(|| App::new().service(scope_module::guard));
let request = srv
.request(http::Method::GET, srv.url("/test/test/guard"))
.insert_header(("Accept", "image/*"));
let response = request.send().await.unwrap();
assert!(response.status().is_success());
}
#[actix_rt::test]
async fn scope_v1_v2_async() {
let srv = actix_test::start(|| {
App::new()
.service(mod_scope_v1::test)
.service(mod_scope_v2::test)
});
let request = srv.request(http::Method::GET, srv.url("/v1/test"));
let mut response = request.send().await.unwrap();
let body = response.body().await.unwrap();
let body_str = String::from_utf8(body.to_vec()).unwrap();
assert_eq!(body_str, "version1 works");
let request = srv.request(http::Method::GET, srv.url("/v2/test"));
let mut response = request.send().await.unwrap();
let body = response.body().await.unwrap();
let body_str = String::from_utf8(body.to_vec()).unwrap();
assert_eq!(body_str, "version2 works");
}

View file

@ -1,4 +1,4 @@
#[rustversion::stable(1.68)] // MSRV #[rustversion::stable(1.72)] // MSRV
#[test] #[test]
fn compile_macros() { fn compile_macros() {
let t = trybuild::TestCases::new(); let t = trybuild::TestCases::new();
@ -18,6 +18,11 @@ fn compile_macros() {
t.compile_fail("tests/trybuild/routes-missing-method-fail.rs"); t.compile_fail("tests/trybuild/routes-missing-method-fail.rs");
t.compile_fail("tests/trybuild/routes-missing-args-fail.rs"); t.compile_fail("tests/trybuild/routes-missing-args-fail.rs");
t.compile_fail("tests/trybuild/scope-on-handler.rs");
t.compile_fail("tests/trybuild/scope-missing-args.rs");
t.compile_fail("tests/trybuild/scope-invalid-args.rs");
t.compile_fail("tests/trybuild/scope-trailing-slash.rs");
t.pass("tests/trybuild/docstring-ok.rs"); t.pass("tests/trybuild/docstring-ok.rs");
t.pass("tests/trybuild/test-runtime.rs"); t.pass("tests/trybuild/test-runtime.rs");

Some files were not shown because too many files have changed in this diff