mirror of
https://github.com/actix/actix-web.git
synced 2024-12-17 13:46:36 +00:00
Merge branch 'master' into asonix/play-with-h1-encoding
This commit is contained in:
commit
5e28e69e29
167 changed files with 4670 additions and 2263 deletions
|
@ -1,10 +0,0 @@
|
||||||
[alias]
|
|
||||||
lint = "clippy --workspace --all-targets -- -Dclippy::todo"
|
|
||||||
lint-all = "clippy --workspace --all-features --all-targets -- -Dclippy::todo"
|
|
||||||
|
|
||||||
# lib checking
|
|
||||||
ci-check-min = "hack --workspace check --no-default-features"
|
|
||||||
ci-check-default = "hack --workspace check"
|
|
||||||
ci-check-default-tests = "check --workspace --tests"
|
|
||||||
ci-check-all-feature-powerset="hack --workspace --feature-powerset --depth=4 --skip=__compress,experimental-io-uring check"
|
|
||||||
ci-check-all-feature-powerset-linux="hack --workspace --feature-powerset --depth=4 --skip=__compress check"
|
|
7
.clippy.toml
Normal file
7
.clippy.toml
Normal file
|
@ -0,0 +1,7 @@
|
||||||
|
disallowed-names = [
|
||||||
|
"e", # no single letter error bindings
|
||||||
|
]
|
||||||
|
disallowed-methods = [
|
||||||
|
"std::cell::RefCell::default()",
|
||||||
|
"std::rc::Rc::default()",
|
||||||
|
]
|
26
.github/workflows/ci-post-merge.yml
vendored
26
.github/workflows/ci-post-merge.yml
vendored
|
@ -44,20 +44,20 @@ jobs:
|
||||||
echo "RUSTFLAGS=-C target-feature=+crt-static" >> $GITHUB_ENV
|
echo "RUSTFLAGS=-C target-feature=+crt-static" >> $GITHUB_ENV
|
||||||
|
|
||||||
- name: Install Rust (${{ matrix.version.name }})
|
- name: Install Rust (${{ matrix.version.name }})
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||||
with:
|
with:
|
||||||
toolchain: ${{ matrix.version.version }}
|
toolchain: ${{ matrix.version.version }}
|
||||||
|
|
||||||
- name: Install just, cargo-hack, cargo-nextest, cargo-ci-cache-clean
|
- name: Install just, cargo-hack, cargo-nextest, cargo-ci-cache-clean
|
||||||
uses: taiki-e/install-action@v2.33.22
|
uses: taiki-e/install-action@v2.44.60
|
||||||
with:
|
with:
|
||||||
tool: just,cargo-hack,cargo-nextest,cargo-ci-cache-clean
|
tool: just,cargo-hack,cargo-nextest,cargo-ci-cache-clean
|
||||||
|
|
||||||
- name: check minimal
|
- name: check minimal
|
||||||
run: cargo ci-check-min
|
run: just check-min
|
||||||
|
|
||||||
- name: check default
|
- name: check default
|
||||||
run: cargo ci-check-default
|
run: just check-default
|
||||||
|
|
||||||
- name: tests
|
- name: tests
|
||||||
timeout-minutes: 60
|
timeout-minutes: 60
|
||||||
|
@ -76,16 +76,16 @@ jobs:
|
||||||
- name: Free Disk Space
|
- name: Free Disk Space
|
||||||
run: ./scripts/free-disk-space.sh
|
run: ./scripts/free-disk-space.sh
|
||||||
|
|
||||||
|
- name: Setup mold linker
|
||||||
|
uses: rui314/setup-mold@v1
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||||
|
|
||||||
- name: Install cargo-hack
|
- name: Install just, cargo-hack
|
||||||
uses: taiki-e/install-action@v2.33.22
|
uses: taiki-e/install-action@v2.44.60
|
||||||
with:
|
with:
|
||||||
tool: cargo-hack
|
tool: just,cargo-hack
|
||||||
|
|
||||||
- name: check feature combinations
|
- name: Check feature combinations
|
||||||
run: cargo ci-check-all-feature-powerset
|
run: just check-feature-combinations
|
||||||
|
|
||||||
- name: check feature combinations
|
|
||||||
run: cargo ci-check-all-feature-powerset-linux
|
|
||||||
|
|
16
.github/workflows/ci.yml
vendored
16
.github/workflows/ci.yml
vendored
|
@ -18,7 +18,7 @@ concurrency:
|
||||||
jobs:
|
jobs:
|
||||||
read_msrv:
|
read_msrv:
|
||||||
name: Read MSRV
|
name: Read MSRV
|
||||||
uses: actions-rust-lang/msrv/.github/workflows/msrv.yml@main
|
uses: actions-rust-lang/msrv/.github/workflows/msrv.yml@v0.1.0
|
||||||
|
|
||||||
build_and_test:
|
build_and_test:
|
||||||
needs: read_msrv
|
needs: read_msrv
|
||||||
|
@ -59,12 +59,12 @@ jobs:
|
||||||
uses: rui314/setup-mold@v1
|
uses: rui314/setup-mold@v1
|
||||||
|
|
||||||
- name: Install Rust (${{ matrix.version.name }})
|
- name: Install Rust (${{ matrix.version.name }})
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||||
with:
|
with:
|
||||||
toolchain: ${{ matrix.version.version }}
|
toolchain: ${{ matrix.version.version }}
|
||||||
|
|
||||||
- name: Install just, cargo-hack, cargo-nextest, cargo-ci-cache-clean
|
- name: Install just, cargo-hack, cargo-nextest, cargo-ci-cache-clean
|
||||||
uses: taiki-e/install-action@v2.33.22
|
uses: taiki-e/install-action@v2.44.60
|
||||||
with:
|
with:
|
||||||
tool: just,cargo-hack,cargo-nextest,cargo-ci-cache-clean
|
tool: just,cargo-hack,cargo-nextest,cargo-ci-cache-clean
|
||||||
|
|
||||||
|
@ -73,10 +73,10 @@ jobs:
|
||||||
run: just downgrade-for-msrv
|
run: just downgrade-for-msrv
|
||||||
|
|
||||||
- name: check minimal
|
- name: check minimal
|
||||||
run: cargo ci-check-min
|
run: just check-min
|
||||||
|
|
||||||
- name: check default
|
- name: check default
|
||||||
run: cargo ci-check-default
|
run: just check-default
|
||||||
|
|
||||||
- name: tests
|
- name: tests
|
||||||
timeout-minutes: 60
|
timeout-minutes: 60
|
||||||
|
@ -92,7 +92,7 @@ jobs:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||||
with:
|
with:
|
||||||
toolchain: nightly
|
toolchain: nightly
|
||||||
|
|
||||||
|
@ -108,12 +108,12 @@ jobs:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust (nightly)
|
- name: Install Rust (nightly)
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||||
with:
|
with:
|
||||||
toolchain: nightly
|
toolchain: nightly
|
||||||
|
|
||||||
- name: Install just
|
- name: Install just
|
||||||
uses: taiki-e/install-action@v2.33.22
|
uses: taiki-e/install-action@v2.44.60
|
||||||
with:
|
with:
|
||||||
tool: just
|
tool: just
|
||||||
|
|
||||||
|
|
17
.github/workflows/coverage.yml
vendored
17
.github/workflows/coverage.yml
vendored
|
@ -17,21 +17,22 @@ jobs:
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust (nightly)
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||||
with:
|
with:
|
||||||
components: llvm-tools-preview
|
toolchain: nightly
|
||||||
|
components: llvm-tools
|
||||||
|
|
||||||
- name: Install cargo-llvm-cov
|
- name: Install just, cargo-llvm-cov, cargo-nextest
|
||||||
uses: taiki-e/install-action@v2.33.22
|
uses: taiki-e/install-action@v2.44.60
|
||||||
with:
|
with:
|
||||||
tool: cargo-llvm-cov
|
tool: just,cargo-llvm-cov,cargo-nextest
|
||||||
|
|
||||||
- name: Generate code coverage
|
- name: Generate code coverage
|
||||||
run: cargo llvm-cov --workspace --all-features --codecov --output-path codecov.json
|
run: just test-coverage-codecov
|
||||||
|
|
||||||
- name: Upload coverage to Codecov
|
- name: Upload coverage to Codecov
|
||||||
uses: codecov/codecov-action@v4.3.1
|
uses: codecov/codecov-action@v4.6.0
|
||||||
with:
|
with:
|
||||||
files: codecov.json
|
files: codecov.json
|
||||||
fail_ci_if_error: true
|
fail_ci_if_error: true
|
||||||
|
|
40
.github/workflows/lint.yml
vendored
40
.github/workflows/lint.yml
vendored
|
@ -18,7 +18,7 @@ jobs:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust (nightly)
|
- name: Install Rust (nightly)
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||||
with:
|
with:
|
||||||
toolchain: nightly
|
toolchain: nightly
|
||||||
components: rustfmt
|
components: rustfmt
|
||||||
|
@ -36,7 +36,7 @@ jobs:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||||
with:
|
with:
|
||||||
components: clippy
|
components: clippy
|
||||||
|
|
||||||
|
@ -55,7 +55,7 @@ jobs:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust (nightly)
|
- name: Install Rust (nightly)
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||||
with:
|
with:
|
||||||
toolchain: nightly
|
toolchain: nightly
|
||||||
components: rust-docs
|
components: rust-docs
|
||||||
|
@ -65,6 +65,30 @@ jobs:
|
||||||
RUSTDOCFLAGS: -D warnings
|
RUSTDOCFLAGS: -D warnings
|
||||||
run: cargo +nightly doc --no-deps --workspace --all-features
|
run: cargo +nightly doc --no-deps --workspace --all-features
|
||||||
|
|
||||||
|
check-external-types:
|
||||||
|
if: false # disable until https://github.com/awslabs/cargo-check-external-types/pull/177 is marged
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Install Rust (nightly-2024-05-01)
|
||||||
|
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||||
|
with:
|
||||||
|
toolchain: nightly-2024-05-01
|
||||||
|
|
||||||
|
- name: Install just
|
||||||
|
uses: taiki-e/install-action@v2.44.60
|
||||||
|
with:
|
||||||
|
tool: just
|
||||||
|
|
||||||
|
- name: Install cargo-check-external-types
|
||||||
|
uses: taiki-e/cache-cargo-install-action@v2.0.1
|
||||||
|
with:
|
||||||
|
tool: cargo-check-external-types
|
||||||
|
|
||||||
|
- name: check external types
|
||||||
|
run: just check-external-types-all +nightly-2024-05-01
|
||||||
|
|
||||||
public-api-diff:
|
public-api-diff:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
|
@ -76,18 +100,18 @@ jobs:
|
||||||
- name: Checkout PR branch
|
- name: Checkout PR branch
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust (nightly-2024-09-30)
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||||
with:
|
with:
|
||||||
toolchain: nightly-2024-04-26
|
toolchain: nightly-2024-09-30
|
||||||
|
|
||||||
- name: Install cargo-public-api
|
- name: Install cargo-public-api
|
||||||
uses: taiki-e/install-action@v2.33.22
|
uses: taiki-e/install-action@v2.44.60
|
||||||
with:
|
with:
|
||||||
tool: cargo-public-api
|
tool: cargo-public-api
|
||||||
|
|
||||||
- name: Generate API diff
|
- name: Generate API diff
|
||||||
run: |
|
run: |
|
||||||
for f in $(find -mindepth 2 -maxdepth 2 -name Cargo.toml); do
|
for f in $(find -mindepth 2 -maxdepth 2 -name Cargo.toml); do
|
||||||
cargo public-api --manifest-path "$f" diff ${{ github.event.pull_request.base.sha }}..${{ github.sha }}
|
cargo public-api --manifest-path "$f" --simplified diff ${{ github.event.pull_request.base.sha }}..${{ github.sha }}
|
||||||
done
|
done
|
||||||
|
|
41
.github/workflows/upload-doc.yml
vendored
41
.github/workflows/upload-doc.yml
vendored
|
@ -1,41 +0,0 @@
|
||||||
name: Upload Documentation
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [master]
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
|
|
||||||
concurrency:
|
|
||||||
group: ${{ github.workflow }}-${{ github.ref }}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Install Rust
|
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
|
||||||
with:
|
|
||||||
toolchain: nightly
|
|
||||||
|
|
||||||
- name: Build Docs
|
|
||||||
run: cargo +nightly doc --no-deps --workspace --all-features
|
|
||||||
env:
|
|
||||||
RUSTDOCFLAGS: --cfg=docsrs
|
|
||||||
|
|
||||||
- name: Tweak HTML
|
|
||||||
run: echo '<meta http-equiv="refresh" content="0;url=actix_web/index.html">' > target/doc/index.html
|
|
||||||
|
|
||||||
- name: Deploy to GitHub Pages
|
|
||||||
uses: JamesIves/github-pages-deploy-action@v4.6.0
|
|
||||||
with:
|
|
||||||
folder: target/doc
|
|
||||||
single-commit: true
|
|
10
Cargo.toml
10
Cargo.toml
|
@ -19,7 +19,7 @@ homepage = "https://actix.rs"
|
||||||
repository = "https://github.com/actix/actix-web"
|
repository = "https://github.com/actix/actix-web"
|
||||||
license = "MIT OR Apache-2.0"
|
license = "MIT OR Apache-2.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
rust-version = "1.72"
|
rust-version = "1.75"
|
||||||
|
|
||||||
[profile.dev]
|
[profile.dev]
|
||||||
# Disabling debug info speeds up builds a bunch and we don't rely on it for debugging that much.
|
# Disabling debug info speeds up builds a bunch and we don't rely on it for debugging that much.
|
||||||
|
@ -51,3 +51,11 @@ awc = { path = "awc" }
|
||||||
# actix-utils = { path = "../actix-net/actix-utils" }
|
# actix-utils = { path = "../actix-net/actix-utils" }
|
||||||
# actix-tls = { path = "../actix-net/actix-tls" }
|
# actix-tls = { path = "../actix-net/actix-tls" }
|
||||||
# actix-server = { path = "../actix-net/actix-server" }
|
# actix-server = { path = "../actix-net/actix-server" }
|
||||||
|
|
||||||
|
[workspace.lints.rust]
|
||||||
|
rust_2018_idioms = { level = "deny" }
|
||||||
|
future_incompatible = { level = "deny" }
|
||||||
|
nonstandard_style = { level = "deny" }
|
||||||
|
|
||||||
|
[workspace.lints.clippy]
|
||||||
|
# clone_on_ref_ptr = { level = "deny" }
|
||||||
|
|
|
@ -2,6 +2,11 @@
|
||||||
|
|
||||||
## Unreleased
|
## Unreleased
|
||||||
|
|
||||||
|
- Minimum supported Rust version (MSRV) is now 1.75.
|
||||||
|
|
||||||
|
## 0.6.6
|
||||||
|
|
||||||
|
- Update `tokio-uring` dependency to `0.4`.
|
||||||
- Minimum supported Rust version (MSRV) is now 1.72.
|
- Minimum supported Rust version (MSRV) is now 1.72.
|
||||||
|
|
||||||
## 0.6.5
|
## 0.6.5
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
[package]
|
[package]
|
||||||
name = "actix-files"
|
name = "actix-files"
|
||||||
version = "0.6.5"
|
version = "0.6.6"
|
||||||
authors = [
|
authors = [
|
||||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||||
"Rob Ede <robjtede@icloud.com>",
|
"Rob Ede <robjtede@icloud.com>",
|
||||||
|
@ -13,9 +13,14 @@ categories = ["asynchronous", "web-programming::http-server"]
|
||||||
license = "MIT OR Apache-2.0"
|
license = "MIT OR Apache-2.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
|
|
||||||
[lib]
|
[package.metadata.cargo_check_external_types]
|
||||||
name = "actix_files"
|
allowed_external_types = [
|
||||||
path = "src/lib.rs"
|
"actix_http::*",
|
||||||
|
"actix_service::*",
|
||||||
|
"actix_web::*",
|
||||||
|
"http::*",
|
||||||
|
"mime::*",
|
||||||
|
]
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
experimental-io-uring = ["actix-web/experimental-io-uring", "tokio-uring"]
|
experimental-io-uring = ["actix-web/experimental-io-uring", "tokio-uring"]
|
||||||
|
@ -28,7 +33,7 @@ actix-web = { version = "4", default-features = false }
|
||||||
|
|
||||||
bitflags = "2"
|
bitflags = "2"
|
||||||
bytes = "1"
|
bytes = "1"
|
||||||
derive_more = "0.99.5"
|
derive_more = { version = "1", features = ["display", "error", "from"] }
|
||||||
futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
|
futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
|
||||||
http-range = "0.1.4"
|
http-range = "0.1.4"
|
||||||
log = "0.4"
|
log = "0.4"
|
||||||
|
@ -40,8 +45,8 @@ v_htmlescape = "0.15.5"
|
||||||
|
|
||||||
# experimental-io-uring
|
# experimental-io-uring
|
||||||
[target.'cfg(target_os = "linux")'.dependencies]
|
[target.'cfg(target_os = "linux")'.dependencies]
|
||||||
tokio-uring = { version = "0.4", optional = true, features = ["bytes"] }
|
tokio-uring = { version = "0.5", optional = true, features = ["bytes"] }
|
||||||
actix-server = { version = "2.2", optional = true } # ensure matching tokio-uring versions
|
actix-server = { version = "2.4", optional = true } # ensure matching tokio-uring versions
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
actix-rt = "2.7"
|
actix-rt = "2.7"
|
||||||
|
@ -49,3 +54,6 @@ actix-test = "0.1"
|
||||||
actix-web = "4"
|
actix-web = "4"
|
||||||
env_logger = "0.11"
|
env_logger = "0.11"
|
||||||
tempfile = "3.2"
|
tempfile = "3.2"
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
|
@ -3,11 +3,11 @@
|
||||||
<!-- prettier-ignore-start -->
|
<!-- prettier-ignore-start -->
|
||||||
|
|
||||||
[![crates.io](https://img.shields.io/crates/v/actix-files?label=latest)](https://crates.io/crates/actix-files)
|
[![crates.io](https://img.shields.io/crates/v/actix-files?label=latest)](https://crates.io/crates/actix-files)
|
||||||
[![Documentation](https://docs.rs/actix-files/badge.svg?version=0.6.5)](https://docs.rs/actix-files/0.6.5)
|
[![Documentation](https://docs.rs/actix-files/badge.svg?version=0.6.6)](https://docs.rs/actix-files/0.6.6)
|
||||||
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
|
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
|
||||||
![License](https://img.shields.io/crates/l/actix-files.svg)
|
![License](https://img.shields.io/crates/l/actix-files.svg)
|
||||||
<br />
|
<br />
|
||||||
[![dependency status](https://deps.rs/crate/actix-files/0.6.5/status.svg)](https://deps.rs/crate/actix-files/0.6.5)
|
[![dependency status](https://deps.rs/crate/actix-files/0.6.6/status.svg)](https://deps.rs/crate/actix-files/0.6.6)
|
||||||
[![Download](https://img.shields.io/crates/d/actix-files.svg)](https://crates.io/crates/actix-files)
|
[![Download](https://img.shields.io/crates/d/actix-files.svg)](https://crates.io/crates/actix-files)
|
||||||
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
|
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
|
||||||
|
|
||||||
|
|
|
@ -1,16 +1,16 @@
|
||||||
use actix_web::{http::StatusCode, ResponseError};
|
use actix_web::{http::StatusCode, ResponseError};
|
||||||
use derive_more::Display;
|
use derive_more::derive::Display;
|
||||||
|
|
||||||
/// Errors which can occur when serving static files.
|
/// Errors which can occur when serving static files.
|
||||||
#[derive(Debug, PartialEq, Eq, Display)]
|
#[derive(Debug, PartialEq, Eq, Display)]
|
||||||
pub enum FilesError {
|
pub enum FilesError {
|
||||||
/// Path is not a directory.
|
/// Path is not a directory.
|
||||||
#[allow(dead_code)]
|
#[allow(dead_code)]
|
||||||
#[display(fmt = "path is not a directory. Unable to serve static files")]
|
#[display("path is not a directory. Unable to serve static files")]
|
||||||
IsNotDirectory,
|
IsNotDirectory,
|
||||||
|
|
||||||
/// Cannot render directory.
|
/// Cannot render directory.
|
||||||
#[display(fmt = "unable to render directory without index file")]
|
#[display("unable to render directory without index file")]
|
||||||
IsDirectory,
|
IsDirectory,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -25,19 +25,19 @@ impl ResponseError for FilesError {
|
||||||
#[non_exhaustive]
|
#[non_exhaustive]
|
||||||
pub enum UriSegmentError {
|
pub enum UriSegmentError {
|
||||||
/// Segment started with the wrapped invalid character.
|
/// Segment started with the wrapped invalid character.
|
||||||
#[display(fmt = "segment started with invalid character: ('{_0}')")]
|
#[display("segment started with invalid character: ('{_0}')")]
|
||||||
BadStart(char),
|
BadStart(char),
|
||||||
|
|
||||||
/// Segment contained the wrapped invalid character.
|
/// Segment contained the wrapped invalid character.
|
||||||
#[display(fmt = "segment contained invalid character ('{_0}')")]
|
#[display("segment contained invalid character ('{_0}')")]
|
||||||
BadChar(char),
|
BadChar(char),
|
||||||
|
|
||||||
/// Segment ended with the wrapped invalid character.
|
/// Segment ended with the wrapped invalid character.
|
||||||
#[display(fmt = "segment ended with invalid character: ('{_0}')")]
|
#[display("segment ended with invalid character: ('{_0}')")]
|
||||||
BadEnd(char),
|
BadEnd(char),
|
||||||
|
|
||||||
/// Path is not a valid UTF-8 string after percent-decoding.
|
/// Path is not a valid UTF-8 string after percent-decoding.
|
||||||
#[display(fmt = "path is not a valid UTF-8 string after percent-decoding")]
|
#[display("path is not a valid UTF-8 string after percent-decoding")]
|
||||||
NotValidUtf8,
|
NotValidUtf8,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -11,8 +11,7 @@
|
||||||
//! .service(Files::new("/static", ".").prefer_utf8(true));
|
//! .service(Files::new("/static", ".").prefer_utf8(true));
|
||||||
//! ```
|
//! ```
|
||||||
|
|
||||||
#![deny(rust_2018_idioms, nonstandard_style)]
|
#![warn(missing_docs, missing_debug_implementations)]
|
||||||
#![warn(future_incompatible, missing_docs, missing_debug_implementations)]
|
|
||||||
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
||||||
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
||||||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||||
|
@ -307,11 +306,11 @@ mod tests {
|
||||||
let resp = file.respond_to(&req);
|
let resp = file.respond_to(&req);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
resp.headers().get(header::CONTENT_TYPE).unwrap(),
|
resp.headers().get(header::CONTENT_TYPE).unwrap(),
|
||||||
"application/javascript; charset=utf-8"
|
"text/javascript",
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
resp.headers().get(header::CONTENT_DISPOSITION).unwrap(),
|
resp.headers().get(header::CONTENT_DISPOSITION).unwrap(),
|
||||||
"inline; filename=\"test.js\""
|
"inline; filename=\"test.js\"",
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -21,7 +21,7 @@ use actix_web::{
|
||||||
Error, HttpMessage, HttpRequest, HttpResponse, Responder,
|
Error, HttpMessage, HttpRequest, HttpResponse, Responder,
|
||||||
};
|
};
|
||||||
use bitflags::bitflags;
|
use bitflags::bitflags;
|
||||||
use derive_more::{Deref, DerefMut};
|
use derive_more::derive::{Deref, DerefMut};
|
||||||
use futures_core::future::LocalBoxFuture;
|
use futures_core::future::LocalBoxFuture;
|
||||||
use mime::Mime;
|
use mime::Mime;
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
use std::fmt;
|
use std::fmt;
|
||||||
|
|
||||||
use derive_more::Error;
|
use derive_more::derive::Error;
|
||||||
|
|
||||||
/// Copy of `http_range::HttpRangeParseError`.
|
/// Copy of `http_range::HttpRangeParseError`.
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
|
|
|
@ -79,7 +79,7 @@ impl FilesService {
|
||||||
|
|
||||||
let (req, _) = req.into_parts();
|
let (req, _) = req.into_parts();
|
||||||
|
|
||||||
(self.renderer)(&dir, &req).unwrap_or_else(|e| ServiceResponse::from_err(e, req))
|
(self.renderer)(&dir, &req).unwrap_or_else(|err| ServiceResponse::from_err(err, req))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -18,9 +18,17 @@ edition = "2021"
|
||||||
[package.metadata.docs.rs]
|
[package.metadata.docs.rs]
|
||||||
features = []
|
features = []
|
||||||
|
|
||||||
[lib]
|
[package.metadata.cargo_check_external_types]
|
||||||
name = "actix_http_test"
|
allowed_external_types = [
|
||||||
path = "src/lib.rs"
|
"actix_codec::*",
|
||||||
|
"actix_http::*",
|
||||||
|
"actix_server::*",
|
||||||
|
"awc::*",
|
||||||
|
"bytes::*",
|
||||||
|
"futures_core::*",
|
||||||
|
"http::*",
|
||||||
|
"tokio::*",
|
||||||
|
]
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
default = []
|
default = []
|
||||||
|
@ -51,3 +59,6 @@ tokio = { version = "1.24.2", features = ["sync"] }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
actix-http = "3"
|
actix-http = "3"
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
|
@ -1,7 +1,5 @@
|
||||||
# `actix-http-test`
|
# `actix-http-test`
|
||||||
|
|
||||||
> Various helpers for Actix applications to use during testing.
|
|
||||||
|
|
||||||
<!-- prettier-ignore-start -->
|
<!-- prettier-ignore-start -->
|
||||||
|
|
||||||
[![crates.io](https://img.shields.io/crates/v/actix-http-test?label=latest)](https://crates.io/crates/actix-http-test)
|
[![crates.io](https://img.shields.io/crates/v/actix-http-test?label=latest)](https://crates.io/crates/actix-http-test)
|
||||||
|
@ -14,3 +12,9 @@
|
||||||
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
|
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
|
||||||
|
|
||||||
<!-- prettier-ignore-end -->
|
<!-- prettier-ignore-end -->
|
||||||
|
|
||||||
|
<!-- cargo-rdme start -->
|
||||||
|
|
||||||
|
Various helpers for Actix applications to use during testing.
|
||||||
|
|
||||||
|
<!-- cargo-rdme end -->
|
||||||
|
|
|
@ -1,7 +1,5 @@
|
||||||
//! Various helpers for Actix applications to use during testing.
|
//! Various helpers for Actix applications to use during testing.
|
||||||
|
|
||||||
#![deny(rust_2018_idioms, nonstandard_style)]
|
|
||||||
#![warn(future_incompatible)]
|
|
||||||
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
||||||
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
||||||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||||
|
@ -108,7 +106,7 @@ pub async fn test_server_with_addr<F: ServerServiceFactory<TcpStream>>(
|
||||||
builder.set_verify(SslVerifyMode::NONE);
|
builder.set_verify(SslVerifyMode::NONE);
|
||||||
let _ = builder
|
let _ = builder
|
||||||
.set_alpn_protos(b"\x02h2\x08http/1.1")
|
.set_alpn_protos(b"\x02h2\x08http/1.1")
|
||||||
.map_err(|e| log::error!("Can not set alpn protocol: {:?}", e));
|
.map_err(|err| log::error!("Can not set ALPN protocol: {err}"));
|
||||||
|
|
||||||
Connector::new()
|
Connector::new()
|
||||||
.conn_lifetime(Duration::from_secs(0))
|
.conn_lifetime(Duration::from_secs(0))
|
||||||
|
|
|
@ -2,6 +2,27 @@
|
||||||
|
|
||||||
## Unreleased
|
## Unreleased
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- Add `header::CLEAR_SITE_DATA` constant.
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Update `brotli` dependency to `7`.
|
||||||
|
- Minimum supported Rust version (MSRV) is now 1.75.
|
||||||
|
|
||||||
|
## 3.9.0
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- Implement `FromIterator<(HeaderName, HeaderValue)>` for `HeaderMap`.
|
||||||
|
|
||||||
|
## 3.8.0
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- Add `error::InvalidStatusCode` re-export.
|
||||||
|
|
||||||
## 3.7.0
|
## 3.7.0
|
||||||
|
|
||||||
### Added
|
### Added
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
[package]
|
[package]
|
||||||
name = "actix-http"
|
name = "actix-http"
|
||||||
version = "3.7.0"
|
version = "3.9.0"
|
||||||
authors = [
|
authors = [
|
||||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||||
"Rob Ede <robjtede@icloud.com>",
|
"Rob Ede <robjtede@icloud.com>",
|
||||||
|
@ -34,51 +34,72 @@ features = [
|
||||||
"compress-zstd",
|
"compress-zstd",
|
||||||
]
|
]
|
||||||
|
|
||||||
[lib]
|
[package.metadata.cargo_check_external_types]
|
||||||
name = "actix_http"
|
allowed_external_types = [
|
||||||
path = "src/lib.rs"
|
"actix_codec::*",
|
||||||
|
"actix_service::*",
|
||||||
|
"actix_tls::*",
|
||||||
|
"actix_utils::*",
|
||||||
|
"bytes::*",
|
||||||
|
"bytestring::*",
|
||||||
|
"encoding_rs::*",
|
||||||
|
"futures_core::*",
|
||||||
|
"h2::*",
|
||||||
|
"http::*",
|
||||||
|
"httparse::*",
|
||||||
|
"language_tags::*",
|
||||||
|
"mime::*",
|
||||||
|
"openssl::*",
|
||||||
|
"rustls::*",
|
||||||
|
"tokio_util::*",
|
||||||
|
"tokio::*",
|
||||||
|
]
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
default = []
|
default = []
|
||||||
|
|
||||||
# HTTP/2 protocol support
|
# HTTP/2 protocol support
|
||||||
http2 = ["h2"]
|
http2 = ["dep:h2"]
|
||||||
|
|
||||||
# WebSocket protocol implementation
|
# WebSocket protocol implementation
|
||||||
ws = [
|
ws = [
|
||||||
"local-channel",
|
"dep:local-channel",
|
||||||
"base64",
|
"dep:base64",
|
||||||
"rand",
|
"dep:rand",
|
||||||
"sha1",
|
"dep:sha1",
|
||||||
]
|
]
|
||||||
|
|
||||||
# TLS via OpenSSL
|
# TLS via OpenSSL
|
||||||
openssl = ["actix-tls/accept", "actix-tls/openssl"]
|
openssl = ["__tls", "actix-tls/accept", "actix-tls/openssl"]
|
||||||
|
|
||||||
# TLS via Rustls v0.20
|
# TLS via Rustls v0.20
|
||||||
rustls = ["rustls-0_20"]
|
rustls = ["__tls", "rustls-0_20"]
|
||||||
|
|
||||||
# TLS via Rustls v0.20
|
# TLS via Rustls v0.20
|
||||||
rustls-0_20 = ["actix-tls/accept", "actix-tls/rustls-0_20"]
|
rustls-0_20 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_20"]
|
||||||
|
|
||||||
# TLS via Rustls v0.21
|
# TLS via Rustls v0.21
|
||||||
rustls-0_21 = ["actix-tls/accept", "actix-tls/rustls-0_21"]
|
rustls-0_21 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_21"]
|
||||||
|
|
||||||
# TLS via Rustls v0.22
|
# TLS via Rustls v0.22
|
||||||
rustls-0_22 = ["actix-tls/accept", "actix-tls/rustls-0_22"]
|
rustls-0_22 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_22"]
|
||||||
|
|
||||||
# TLS via Rustls v0.23
|
# TLS via Rustls v0.23
|
||||||
rustls-0_23 = ["actix-tls/accept", "actix-tls/rustls-0_23"]
|
rustls-0_23 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_23"]
|
||||||
|
|
||||||
# Compression codecs
|
# Compression codecs
|
||||||
compress-brotli = ["__compress", "brotli"]
|
compress-brotli = ["__compress", "dep:brotli"]
|
||||||
compress-gzip = ["__compress", "flate2"]
|
compress-gzip = ["__compress", "dep:flate2"]
|
||||||
compress-zstd = ["__compress", "zstd"]
|
compress-zstd = ["__compress", "dep:zstd"]
|
||||||
|
|
||||||
# Internal (PRIVATE!) features used to aid testing and checking feature status.
|
# Internal (PRIVATE!) features used to aid testing and checking feature status.
|
||||||
# Don't rely on these whatsoever. They are semver-exempt and may disappear at anytime.
|
# Don't rely on these whatsoever. They are semver-exempt and may disappear at anytime.
|
||||||
__compress = []
|
__compress = []
|
||||||
|
|
||||||
|
# Internal (PRIVATE!) features used to aid checking feature status.
|
||||||
|
# Don't rely on these whatsoever. They may disappear at anytime.
|
||||||
|
__tls = []
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
actix-service = "2"
|
actix-service = "2"
|
||||||
actix-codec = "0.5"
|
actix-codec = "0.5"
|
||||||
|
@ -89,7 +110,7 @@ ahash = "0.8"
|
||||||
bitflags = "2"
|
bitflags = "2"
|
||||||
bytes = "1.7"
|
bytes = "1.7"
|
||||||
bytestring = "1"
|
bytestring = "1"
|
||||||
derive_more = "0.99.5"
|
derive_more = { version = "1", features = ["as_ref", "deref", "deref_mut", "display", "error", "from"] }
|
||||||
encoding_rs = "0.8"
|
encoding_rs = "0.8"
|
||||||
futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
|
futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
|
||||||
http = "0.2.7"
|
http = "0.2.7"
|
||||||
|
@ -106,7 +127,7 @@ tokio-util = { version = "0.7", features = ["io", "codec"] }
|
||||||
tracing = { version = "0.1.30", default-features = false, features = ["log"] }
|
tracing = { version = "0.1.30", default-features = false, features = ["log"] }
|
||||||
|
|
||||||
# http2
|
# http2
|
||||||
h2 = { version = "0.3.24", optional = true }
|
h2 = { version = "0.3.26", optional = true }
|
||||||
|
|
||||||
# websockets
|
# websockets
|
||||||
local-channel = { version = "0.1", optional = true }
|
local-channel = { version = "0.1", optional = true }
|
||||||
|
@ -118,7 +139,7 @@ sha1 = { version = "0.10", optional = true }
|
||||||
actix-tls = { version = "3.4", default-features = false, optional = true }
|
actix-tls = { version = "3.4", default-features = false, optional = true }
|
||||||
|
|
||||||
# compress-*
|
# compress-*
|
||||||
brotli = { version = "6", optional = true }
|
brotli = { version = "7", optional = true }
|
||||||
flate2 = { version = "1.0.13", optional = true }
|
flate2 = { version = "1.0.13", optional = true }
|
||||||
zstd = { version = "0.13", optional = true }
|
zstd = { version = "0.13", optional = true }
|
||||||
|
|
||||||
|
@ -139,13 +160,16 @@ rcgen = "0.13"
|
||||||
regex = "1.3"
|
regex = "1.3"
|
||||||
rustversion = "1"
|
rustversion = "1"
|
||||||
rustls-pemfile = "2"
|
rustls-pemfile = "2"
|
||||||
serde = { version = "1.0", features = ["derive"] }
|
serde = { version = "1", features = ["derive"] }
|
||||||
serde_json = "1.0"
|
serde_json = "1.0"
|
||||||
static_assertions = "1"
|
static_assertions = "1"
|
||||||
tls-openssl = { package = "openssl", version = "0.10.55" }
|
tls-openssl = { package = "openssl", version = "0.10.55" }
|
||||||
tls-rustls_023 = { package = "rustls", version = "0.23" }
|
tls-rustls_023 = { package = "rustls", version = "0.23" }
|
||||||
tokio = { version = "1.24.2", features = ["net", "rt", "macros"] }
|
tokio = { version = "1.24.2", features = ["net", "rt", "macros"] }
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
||||||
[[example]]
|
[[example]]
|
||||||
name = "ws"
|
name = "ws"
|
||||||
required-features = ["ws", "rustls-0_23"]
|
required-features = ["ws", "rustls-0_23"]
|
||||||
|
|
|
@ -5,11 +5,11 @@
|
||||||
<!-- prettier-ignore-start -->
|
<!-- prettier-ignore-start -->
|
||||||
|
|
||||||
[![crates.io](https://img.shields.io/crates/v/actix-http?label=latest)](https://crates.io/crates/actix-http)
|
[![crates.io](https://img.shields.io/crates/v/actix-http?label=latest)](https://crates.io/crates/actix-http)
|
||||||
[![Documentation](https://docs.rs/actix-http/badge.svg?version=3.7.0)](https://docs.rs/actix-http/3.7.0)
|
[![Documentation](https://docs.rs/actix-http/badge.svg?version=3.9.0)](https://docs.rs/actix-http/3.9.0)
|
||||||
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
|
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
|
||||||
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http.svg)
|
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http.svg)
|
||||||
<br />
|
<br />
|
||||||
[![dependency status](https://deps.rs/crate/actix-http/3.7.0/status.svg)](https://deps.rs/crate/actix-http/3.7.0)
|
[![dependency status](https://deps.rs/crate/actix-http/3.9.0/status.svg)](https://deps.rs/crate/actix-http/3.9.0)
|
||||||
[![Download](https://img.shields.io/crates/d/actix-http.svg)](https://crates.io/crates/actix-http)
|
[![Download](https://img.shields.io/crates/d/actix-http.svg)](https://crates.io/crates/actix-http)
|
||||||
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
|
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
|
||||||
|
|
||||||
|
|
|
@ -3,13 +3,13 @@ use std::sync::OnceLock;
|
||||||
use actix_http::HttpService;
|
use actix_http::HttpService;
|
||||||
use actix_server::Server;
|
use actix_server::Server;
|
||||||
use actix_service::map_config;
|
use actix_service::map_config;
|
||||||
use actix_web::{dev::AppConfig, get, App};
|
use actix_web::{dev::AppConfig, get, App, Responder};
|
||||||
|
|
||||||
static MEDIUM: OnceLock<String> = OnceLock::new();
|
static MEDIUM: OnceLock<String> = OnceLock::new();
|
||||||
static LARGE: OnceLock<String> = OnceLock::new();
|
static LARGE: OnceLock<String> = OnceLock::new();
|
||||||
|
|
||||||
#[get("/")]
|
#[get("/")]
|
||||||
async fn index() -> &'static str {
|
async fn index() -> impl Responder {
|
||||||
"Hello, world. From Actix Web!"
|
"Hello, world. From Actix Web!"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -23,7 +23,7 @@ async fn main() -> io::Result<()> {
|
||||||
body.extend_from_slice(&item?);
|
body.extend_from_slice(&item?);
|
||||||
}
|
}
|
||||||
|
|
||||||
info!("request body: {:?}", body);
|
info!("request body: {body:?}");
|
||||||
|
|
||||||
let res = Response::build(StatusCode::OK)
|
let res = Response::build(StatusCode::OK)
|
||||||
.insert_header(("x-head", HeaderValue::from_static("dummy value!")))
|
.insert_header(("x-head", HeaderValue::from_static("dummy value!")))
|
||||||
|
@ -31,8 +31,7 @@ async fn main() -> io::Result<()> {
|
||||||
|
|
||||||
Ok::<_, Error>(res)
|
Ok::<_, Error>(res)
|
||||||
})
|
})
|
||||||
// No TLS
|
.tcp() // No TLS
|
||||||
.tcp()
|
|
||||||
})?
|
})?
|
||||||
.run()
|
.run()
|
||||||
.await
|
.await
|
||||||
|
|
|
@ -17,7 +17,7 @@ async fn main() -> io::Result<()> {
|
||||||
ext.insert(42u32);
|
ext.insert(42u32);
|
||||||
})
|
})
|
||||||
.finish(|req: Request| async move {
|
.finish(|req: Request| async move {
|
||||||
info!("{:?}", req);
|
info!("{req:?}");
|
||||||
|
|
||||||
let mut res = Response::build(StatusCode::OK);
|
let mut res = Response::build(StatusCode::OK);
|
||||||
res.insert_header(("x-head", HeaderValue::from_static("dummy value!")));
|
res.insert_header(("x-head", HeaderValue::from_static("dummy value!")));
|
||||||
|
|
|
@ -22,16 +22,16 @@ async fn main() -> io::Result<()> {
|
||||||
.bind("streaming-error", ("127.0.0.1", 8080), || {
|
.bind("streaming-error", ("127.0.0.1", 8080), || {
|
||||||
HttpService::build()
|
HttpService::build()
|
||||||
.finish(|req| async move {
|
.finish(|req| async move {
|
||||||
info!("{:?}", req);
|
info!("{req:?}");
|
||||||
let res = Response::ok();
|
let res = Response::ok();
|
||||||
|
|
||||||
Ok::<_, Infallible>(res.set_body(BodyStream::new(stream! {
|
Ok::<_, Infallible>(res.set_body(BodyStream::new(stream! {
|
||||||
yield Ok(Bytes::from("123"));
|
yield Ok(Bytes::from("123"));
|
||||||
yield Ok(Bytes::from("456"));
|
yield Ok(Bytes::from("456"));
|
||||||
|
|
||||||
actix_rt::time::sleep(Duration::from_millis(1000)).await;
|
actix_rt::time::sleep(Duration::from_secs(1)).await;
|
||||||
|
|
||||||
yield Err(io::Error::new(io::ErrorKind::Other, ""));
|
yield Err(io::Error::new(io::ErrorKind::Other, "abc"));
|
||||||
})))
|
})))
|
||||||
})
|
})
|
||||||
.tcp()
|
.tcp()
|
||||||
|
|
|
@ -17,7 +17,6 @@ use bytes::{Bytes, BytesMut};
|
||||||
use bytestring::ByteString;
|
use bytestring::ByteString;
|
||||||
use futures_core::{ready, Stream};
|
use futures_core::{ready, Stream};
|
||||||
use tokio_util::codec::Encoder;
|
use tokio_util::codec::Encoder;
|
||||||
use tracing::{info, trace};
|
|
||||||
|
|
||||||
#[actix_rt::main]
|
#[actix_rt::main]
|
||||||
async fn main() -> io::Result<()> {
|
async fn main() -> io::Result<()> {
|
||||||
|
@ -37,12 +36,12 @@ async fn main() -> io::Result<()> {
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn handler(req: Request) -> Result<Response<BodyStream<Heartbeat>>, Error> {
|
async fn handler(req: Request) -> Result<Response<BodyStream<Heartbeat>>, Error> {
|
||||||
info!("handshaking");
|
tracing::info!("handshaking");
|
||||||
let mut res = ws::handshake(req.head())?;
|
let mut res = ws::handshake(req.head())?;
|
||||||
|
|
||||||
// handshake will always fail under HTTP/2
|
// handshake will always fail under HTTP/2
|
||||||
|
|
||||||
info!("responding");
|
tracing::info!("responding");
|
||||||
res.message_body(BodyStream::new(Heartbeat::new(ws::Codec::new())))
|
res.message_body(BodyStream::new(Heartbeat::new(ws::Codec::new())))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -64,7 +63,7 @@ impl Stream for Heartbeat {
|
||||||
type Item = Result<Bytes, Error>;
|
type Item = Result<Bytes, Error>;
|
||||||
|
|
||||||
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
|
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
|
||||||
trace!("poll");
|
tracing::trace!("poll");
|
||||||
|
|
||||||
ready!(self.as_mut().interval.poll_tick(cx));
|
ready!(self.as_mut().interval.poll_tick(cx));
|
||||||
|
|
||||||
|
|
|
@ -75,7 +75,7 @@ mod tests {
|
||||||
time::{sleep, Sleep},
|
time::{sleep, Sleep},
|
||||||
};
|
};
|
||||||
use actix_utils::future::poll_fn;
|
use actix_utils::future::poll_fn;
|
||||||
use derive_more::{Display, Error};
|
use derive_more::derive::{Display, Error};
|
||||||
use futures_core::ready;
|
use futures_core::ready;
|
||||||
use futures_util::{stream, FutureExt as _};
|
use futures_util::{stream, FutureExt as _};
|
||||||
use pin_project_lite::pin_project;
|
use pin_project_lite::pin_project;
|
||||||
|
@ -131,7 +131,7 @@ mod tests {
|
||||||
assert_eq!(to_bytes(body).await.ok(), Some(Bytes::from("12")));
|
assert_eq!(to_bytes(body).await.ok(), Some(Bytes::from("12")));
|
||||||
}
|
}
|
||||||
#[derive(Debug, Display, Error)]
|
#[derive(Debug, Display, Error)]
|
||||||
#[display(fmt = "stream error")]
|
#[display("stream error")]
|
||||||
struct StreamErr;
|
struct StreamErr;
|
||||||
|
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
|
|
|
@ -3,7 +3,7 @@ use std::task::Poll;
|
||||||
use actix_rt::pin;
|
use actix_rt::pin;
|
||||||
use actix_utils::future::poll_fn;
|
use actix_utils::future::poll_fn;
|
||||||
use bytes::{Bytes, BytesMut};
|
use bytes::{Bytes, BytesMut};
|
||||||
use derive_more::{Display, Error};
|
use derive_more::derive::{Display, Error};
|
||||||
use futures_core::ready;
|
use futures_core::ready;
|
||||||
|
|
||||||
use super::{BodySize, MessageBody};
|
use super::{BodySize, MessageBody};
|
||||||
|
@ -38,7 +38,7 @@ pub async fn to_bytes<B: MessageBody>(body: B) -> Result<Bytes, B::Error> {
|
||||||
|
|
||||||
/// Error type returned from [`to_bytes_limited`] when body produced exceeds limit.
|
/// Error type returned from [`to_bytes_limited`] when body produced exceeds limit.
|
||||||
#[derive(Debug, Display, Error)]
|
#[derive(Debug, Display, Error)]
|
||||||
#[display(fmt = "limit exceeded while collecting body bytes")]
|
#[display("limit exceeded while collecting body bytes")]
|
||||||
#[non_exhaustive]
|
#[non_exhaustive]
|
||||||
pub struct BodyLimitExceeded;
|
pub struct BodyLimitExceeded;
|
||||||
|
|
||||||
|
|
|
@ -10,7 +10,7 @@ use std::{
|
||||||
|
|
||||||
use actix_rt::task::{spawn_blocking, JoinHandle};
|
use actix_rt::task::{spawn_blocking, JoinHandle};
|
||||||
use bytes::Bytes;
|
use bytes::Bytes;
|
||||||
use derive_more::Display;
|
use derive_more::derive::Display;
|
||||||
#[cfg(feature = "compress-gzip")]
|
#[cfg(feature = "compress-gzip")]
|
||||||
use flate2::write::{GzEncoder, ZlibEncoder};
|
use flate2::write::{GzEncoder, ZlibEncoder};
|
||||||
use futures_core::ready;
|
use futures_core::ready;
|
||||||
|
@ -415,11 +415,11 @@ fn new_brotli_compressor() -> Box<brotli::CompressorWriter<Writer>> {
|
||||||
#[non_exhaustive]
|
#[non_exhaustive]
|
||||||
pub enum EncoderError {
|
pub enum EncoderError {
|
||||||
/// Wrapped body stream error.
|
/// Wrapped body stream error.
|
||||||
#[display(fmt = "body")]
|
#[display("body")]
|
||||||
Body(Box<dyn StdError>),
|
Body(Box<dyn StdError>),
|
||||||
|
|
||||||
/// Generic I/O error.
|
/// Generic I/O error.
|
||||||
#[display(fmt = "io")]
|
#[display("io")]
|
||||||
Io(io::Error),
|
Io(io::Error),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -2,8 +2,8 @@
|
||||||
|
|
||||||
use std::{error::Error as StdError, fmt, io, str::Utf8Error, string::FromUtf8Error};
|
use std::{error::Error as StdError, fmt, io, str::Utf8Error, string::FromUtf8Error};
|
||||||
|
|
||||||
use derive_more::{Display, Error, From};
|
use derive_more::derive::{Display, Error, From};
|
||||||
pub use http::Error as HttpError;
|
pub use http::{status::InvalidStatusCode, Error as HttpError};
|
||||||
use http::{uri::InvalidUri, StatusCode};
|
use http::{uri::InvalidUri, StatusCode};
|
||||||
|
|
||||||
use crate::{body::BoxBody, Response};
|
use crate::{body::BoxBody, Response};
|
||||||
|
@ -80,28 +80,28 @@ impl From<Error> for Response<BoxBody> {
|
||||||
|
|
||||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Display)]
|
#[derive(Debug, Clone, Copy, PartialEq, Eq, Display)]
|
||||||
pub(crate) enum Kind {
|
pub(crate) enum Kind {
|
||||||
#[display(fmt = "error processing HTTP")]
|
#[display("error processing HTTP")]
|
||||||
Http,
|
Http,
|
||||||
|
|
||||||
#[display(fmt = "error parsing HTTP message")]
|
#[display("error parsing HTTP message")]
|
||||||
Parse,
|
Parse,
|
||||||
|
|
||||||
#[display(fmt = "request payload read error")]
|
#[display("request payload read error")]
|
||||||
Payload,
|
Payload,
|
||||||
|
|
||||||
#[display(fmt = "response body write error")]
|
#[display("response body write error")]
|
||||||
Body,
|
Body,
|
||||||
|
|
||||||
#[display(fmt = "send response error")]
|
#[display("send response error")]
|
||||||
SendResponse,
|
SendResponse,
|
||||||
|
|
||||||
#[display(fmt = "error in WebSocket process")]
|
#[display("error in WebSocket process")]
|
||||||
Ws,
|
Ws,
|
||||||
|
|
||||||
#[display(fmt = "connection error")]
|
#[display("connection error")]
|
||||||
Io,
|
Io,
|
||||||
|
|
||||||
#[display(fmt = "encoder error")]
|
#[display("encoder error")]
|
||||||
Encoder,
|
Encoder,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -160,44 +160,44 @@ impl From<crate::ws::ProtocolError> for Error {
|
||||||
#[non_exhaustive]
|
#[non_exhaustive]
|
||||||
pub enum ParseError {
|
pub enum ParseError {
|
||||||
/// An invalid `Method`, such as `GE.T`.
|
/// An invalid `Method`, such as `GE.T`.
|
||||||
#[display(fmt = "invalid method specified")]
|
#[display("invalid method specified")]
|
||||||
Method,
|
Method,
|
||||||
|
|
||||||
/// An invalid `Uri`, such as `exam ple.domain`.
|
/// An invalid `Uri`, such as `exam ple.domain`.
|
||||||
#[display(fmt = "URI error: {}", _0)]
|
#[display("URI error: {}", _0)]
|
||||||
Uri(InvalidUri),
|
Uri(InvalidUri),
|
||||||
|
|
||||||
/// An invalid `HttpVersion`, such as `HTP/1.1`
|
/// An invalid `HttpVersion`, such as `HTP/1.1`
|
||||||
#[display(fmt = "invalid HTTP version specified")]
|
#[display("invalid HTTP version specified")]
|
||||||
Version,
|
Version,
|
||||||
|
|
||||||
/// An invalid `Header`.
|
/// An invalid `Header`.
|
||||||
#[display(fmt = "invalid Header provided")]
|
#[display("invalid Header provided")]
|
||||||
Header,
|
Header,
|
||||||
|
|
||||||
/// A message head is too large to be reasonable.
|
/// A message head is too large to be reasonable.
|
||||||
#[display(fmt = "message head is too large")]
|
#[display("message head is too large")]
|
||||||
TooLarge,
|
TooLarge,
|
||||||
|
|
||||||
/// A message reached EOF, but is not complete.
|
/// A message reached EOF, but is not complete.
|
||||||
#[display(fmt = "message is incomplete")]
|
#[display("message is incomplete")]
|
||||||
Incomplete,
|
Incomplete,
|
||||||
|
|
||||||
/// An invalid `Status`, such as `1337 ELITE`.
|
/// An invalid `Status`, such as `1337 ELITE`.
|
||||||
#[display(fmt = "invalid status provided")]
|
#[display("invalid status provided")]
|
||||||
Status,
|
Status,
|
||||||
|
|
||||||
/// A timeout occurred waiting for an IO event.
|
/// A timeout occurred waiting for an IO event.
|
||||||
#[allow(dead_code)]
|
#[allow(dead_code)]
|
||||||
#[display(fmt = "timeout")]
|
#[display("timeout")]
|
||||||
Timeout,
|
Timeout,
|
||||||
|
|
||||||
/// An I/O error that occurred while trying to read or write to a network stream.
|
/// An I/O error that occurred while trying to read or write to a network stream.
|
||||||
#[display(fmt = "I/O error: {}", _0)]
|
#[display("I/O error: {}", _0)]
|
||||||
Io(io::Error),
|
Io(io::Error),
|
||||||
|
|
||||||
/// Parsing a field as string failed.
|
/// Parsing a field as string failed.
|
||||||
#[display(fmt = "UTF-8 error: {}", _0)]
|
#[display("UTF-8 error: {}", _0)]
|
||||||
Utf8(Utf8Error),
|
Utf8(Utf8Error),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -256,28 +256,28 @@ impl From<ParseError> for Response<BoxBody> {
|
||||||
#[non_exhaustive]
|
#[non_exhaustive]
|
||||||
pub enum PayloadError {
|
pub enum PayloadError {
|
||||||
/// A payload reached EOF, but is not complete.
|
/// A payload reached EOF, but is not complete.
|
||||||
#[display(fmt = "payload reached EOF before completing: {:?}", _0)]
|
#[display("payload reached EOF before completing: {:?}", _0)]
|
||||||
Incomplete(Option<io::Error>),
|
Incomplete(Option<io::Error>),
|
||||||
|
|
||||||
/// Content encoding stream corruption.
|
/// Content encoding stream corruption.
|
||||||
#[display(fmt = "can not decode content-encoding")]
|
#[display("can not decode content-encoding")]
|
||||||
EncodingCorrupted,
|
EncodingCorrupted,
|
||||||
|
|
||||||
/// Payload reached size limit.
|
/// Payload reached size limit.
|
||||||
#[display(fmt = "payload reached size limit")]
|
#[display("payload reached size limit")]
|
||||||
Overflow,
|
Overflow,
|
||||||
|
|
||||||
/// Payload length is unknown.
|
/// Payload length is unknown.
|
||||||
#[display(fmt = "payload length is unknown")]
|
#[display("payload length is unknown")]
|
||||||
UnknownLength,
|
UnknownLength,
|
||||||
|
|
||||||
/// HTTP/2 payload error.
|
/// HTTP/2 payload error.
|
||||||
#[cfg(feature = "http2")]
|
#[cfg(feature = "http2")]
|
||||||
#[display(fmt = "{}", _0)]
|
#[display("{}", _0)]
|
||||||
Http2Payload(::h2::Error),
|
Http2Payload(::h2::Error),
|
||||||
|
|
||||||
/// Generic I/O error.
|
/// Generic I/O error.
|
||||||
#[display(fmt = "{}", _0)]
|
#[display("{}", _0)]
|
||||||
Io(io::Error),
|
Io(io::Error),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -326,44 +326,44 @@ impl From<PayloadError> for Error {
|
||||||
#[non_exhaustive]
|
#[non_exhaustive]
|
||||||
pub enum DispatchError {
|
pub enum DispatchError {
|
||||||
/// Service error.
|
/// Service error.
|
||||||
#[display(fmt = "service error")]
|
#[display("service error")]
|
||||||
Service(Response<BoxBody>),
|
Service(Response<BoxBody>),
|
||||||
|
|
||||||
/// Body streaming error.
|
/// Body streaming error.
|
||||||
#[display(fmt = "body error: {}", _0)]
|
#[display("body error: {}", _0)]
|
||||||
Body(Box<dyn StdError>),
|
Body(Box<dyn StdError>),
|
||||||
|
|
||||||
/// Upgrade service error.
|
/// Upgrade service error.
|
||||||
#[display(fmt = "upgrade error")]
|
#[display("upgrade error")]
|
||||||
Upgrade,
|
Upgrade,
|
||||||
|
|
||||||
/// An `io::Error` that occurred while trying to read or write to a network stream.
|
/// An `io::Error` that occurred while trying to read or write to a network stream.
|
||||||
#[display(fmt = "I/O error: {}", _0)]
|
#[display("I/O error: {}", _0)]
|
||||||
Io(io::Error),
|
Io(io::Error),
|
||||||
|
|
||||||
/// Request parse error.
|
/// Request parse error.
|
||||||
#[display(fmt = "request parse error: {}", _0)]
|
#[display("request parse error: {}", _0)]
|
||||||
Parse(ParseError),
|
Parse(ParseError),
|
||||||
|
|
||||||
/// HTTP/2 error.
|
/// HTTP/2 error.
|
||||||
#[display(fmt = "{}", _0)]
|
#[display("{}", _0)]
|
||||||
#[cfg(feature = "http2")]
|
#[cfg(feature = "http2")]
|
||||||
H2(h2::Error),
|
H2(h2::Error),
|
||||||
|
|
||||||
/// The first request did not complete within the specified timeout.
|
/// The first request did not complete within the specified timeout.
|
||||||
#[display(fmt = "request did not complete within the specified timeout")]
|
#[display("request did not complete within the specified timeout")]
|
||||||
SlowRequestTimeout,
|
SlowRequestTimeout,
|
||||||
|
|
||||||
/// Disconnect timeout. Makes sense for TLS streams.
|
/// Disconnect timeout. Makes sense for TLS streams.
|
||||||
#[display(fmt = "connection shutdown timeout")]
|
#[display("connection shutdown timeout")]
|
||||||
DisconnectTimeout,
|
DisconnectTimeout,
|
||||||
|
|
||||||
/// Handler dropped payload before reading EOF.
|
/// Handler dropped payload before reading EOF.
|
||||||
#[display(fmt = "handler dropped payload before reading EOF")]
|
#[display("handler dropped payload before reading EOF")]
|
||||||
HandlerDroppedPayload,
|
HandlerDroppedPayload,
|
||||||
|
|
||||||
/// Internal error.
|
/// Internal error.
|
||||||
#[display(fmt = "internal error")]
|
#[display("internal error")]
|
||||||
InternalError,
|
InternalError,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -389,11 +389,11 @@ impl StdError for DispatchError {
|
||||||
#[non_exhaustive]
|
#[non_exhaustive]
|
||||||
pub enum ContentTypeError {
|
pub enum ContentTypeError {
|
||||||
/// Can not parse content type.
|
/// Can not parse content type.
|
||||||
#[display(fmt = "could not parse content type")]
|
#[display("could not parse content type")]
|
||||||
ParseError,
|
ParseError,
|
||||||
|
|
||||||
/// Unknown content encoding.
|
/// Unknown content encoding.
|
||||||
#[display(fmt = "unknown content encoding")]
|
#[display("unknown content encoding")]
|
||||||
UnknownEncoding,
|
UnknownEncoding,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -314,7 +314,7 @@ impl MessageType for RequestHeadType {
|
||||||
_ => return Err(io::Error::new(io::ErrorKind::Other, "unsupported version")),
|
_ => return Err(io::Error::new(io::ErrorKind::Other, "unsupported version")),
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
.map_err(|e| io::Error::new(io::ErrorKind::Other, e))
|
.map_err(|err| io::Error::new(io::ErrorKind::Other, err))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -499,7 +499,7 @@ impl TransferEncoding {
|
||||||
buf.extend_from_slice(b"0\r\n\r\n");
|
buf.extend_from_slice(b"0\r\n\r\n");
|
||||||
} else {
|
} else {
|
||||||
writeln!(helpers::MutWriter(buf), "{:X}\r", msg.len())
|
writeln!(helpers::MutWriter(buf), "{:X}\r", msg.len())
|
||||||
.map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
|
.map_err(|err| io::Error::new(io::ErrorKind::Other, err))?;
|
||||||
|
|
||||||
buf.reserve(msg.len() + 2);
|
buf.reserve(msg.len() + 2);
|
||||||
buf.extend_from_slice(msg);
|
buf.extend_from_slice(msg);
|
||||||
|
|
|
@ -480,15 +480,15 @@ where
|
||||||
let cfg = self.cfg.clone();
|
let cfg = self.cfg.clone();
|
||||||
|
|
||||||
Box::pin(async move {
|
Box::pin(async move {
|
||||||
let expect = expect
|
let expect = expect.await.map_err(|err| {
|
||||||
.await
|
tracing::error!("Initialization of HTTP expect service error: {err:?}");
|
||||||
.map_err(|e| error!("Init http expect service error: {:?}", e))?;
|
})?;
|
||||||
|
|
||||||
let upgrade = match upgrade {
|
let upgrade = match upgrade {
|
||||||
Some(upgrade) => {
|
Some(upgrade) => {
|
||||||
let upgrade = upgrade
|
let upgrade = upgrade.await.map_err(|err| {
|
||||||
.await
|
tracing::error!("Initialization of HTTP upgrade service error: {err:?}");
|
||||||
.map_err(|e| error!("Init http upgrade service error: {:?}", e))?;
|
})?;
|
||||||
Some(upgrade)
|
Some(upgrade)
|
||||||
}
|
}
|
||||||
None => None,
|
None => None,
|
||||||
|
@ -496,7 +496,7 @@ where
|
||||||
|
|
||||||
let service = service
|
let service = service
|
||||||
.await
|
.await
|
||||||
.map_err(|e| error!("Init http service error: {:?}", e))?;
|
.map_err(|err| error!("Initialization of HTTP service error: {err:?}"))?;
|
||||||
|
|
||||||
Ok(H1ServiceHandler::new(
|
Ok(H1ServiceHandler::new(
|
||||||
cfg,
|
cfg,
|
||||||
|
@ -541,6 +541,6 @@ where
|
||||||
|
|
||||||
fn call(&self, (io, addr): (T, Option<net::SocketAddr>)) -> Self::Future {
|
fn call(&self, (io, addr): (T, Option<net::SocketAddr>)) -> Self::Future {
|
||||||
let conn_data = OnConnectData::from_io(&io, self.on_connect_ext.as_deref());
|
let conn_data = OnConnectData::from_io(&io, self.on_connect_ext.as_deref());
|
||||||
Dispatcher::new(io, self.flow.clone(), self.cfg.clone(), addr, conn_data)
|
Dispatcher::new(io, Rc::clone(&self.flow), self.cfg.clone(), addr, conn_data)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -434,7 +434,7 @@ where
|
||||||
|
|
||||||
H2ServiceHandlerResponse {
|
H2ServiceHandlerResponse {
|
||||||
state: State::Handshake(
|
state: State::Handshake(
|
||||||
Some(self.flow.clone()),
|
Some(Rc::clone(&self.flow)),
|
||||||
Some(self.cfg.clone()),
|
Some(self.cfg.clone()),
|
||||||
addr,
|
addr,
|
||||||
on_connect_data,
|
on_connect_data,
|
||||||
|
|
|
@ -18,6 +18,14 @@ pub const CACHE_STATUS: HeaderName = HeaderName::from_static("cache-status");
|
||||||
// TODO(breaking): replace with http's version
|
// TODO(breaking): replace with http's version
|
||||||
pub const CDN_CACHE_CONTROL: HeaderName = HeaderName::from_static("cdn-cache-control");
|
pub const CDN_CACHE_CONTROL: HeaderName = HeaderName::from_static("cdn-cache-control");
|
||||||
|
|
||||||
|
/// Response header field that sends a signal to the user agent that it ought to remove all data of
|
||||||
|
/// a certain set of types.
|
||||||
|
///
|
||||||
|
/// See the [W3C Clear-Site-Data spec] for full semantics.
|
||||||
|
///
|
||||||
|
/// [W3C Clear-Site-Data spec]: https://www.w3.org/TR/clear-site-data/#header
|
||||||
|
pub const CLEAR_SITE_DATA: HeaderName = HeaderName::from_static("clear-site-data");
|
||||||
|
|
||||||
/// Response header that prevents a document from loading any cross-origin resources that don't
|
/// Response header that prevents a document from loading any cross-origin resources that don't
|
||||||
/// explicitly grant the document permission (using [CORP] or [CORS]).
|
/// explicitly grant the document permission (using [CORP] or [CORS]).
|
||||||
///
|
///
|
||||||
|
|
|
@ -13,8 +13,9 @@ use super::AsHeaderName;
|
||||||
/// `HeaderMap` is a "multi-map" of [`HeaderName`] to one or more [`HeaderValue`]s.
|
/// `HeaderMap` is a "multi-map" of [`HeaderName`] to one or more [`HeaderValue`]s.
|
||||||
///
|
///
|
||||||
/// # Examples
|
/// # Examples
|
||||||
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// use actix_http::header::{self, HeaderMap, HeaderValue};
|
/// # use actix_http::header::{self, HeaderMap, HeaderValue};
|
||||||
///
|
///
|
||||||
/// let mut map = HeaderMap::new();
|
/// let mut map = HeaderMap::new();
|
||||||
///
|
///
|
||||||
|
@ -29,6 +30,21 @@ use super::AsHeaderName;
|
||||||
///
|
///
|
||||||
/// assert!(!map.contains_key(header::ORIGIN));
|
/// assert!(!map.contains_key(header::ORIGIN));
|
||||||
/// ```
|
/// ```
|
||||||
|
///
|
||||||
|
/// Construct a header map using the [`FromIterator`] implementation. Note that it uses the append
|
||||||
|
/// strategy, so duplicate header names are preserved.
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use actix_http::header::{self, HeaderMap, HeaderValue};
|
||||||
|
///
|
||||||
|
/// let headers = HeaderMap::from_iter([
|
||||||
|
/// (header::CONTENT_TYPE, HeaderValue::from_static("text/plain")),
|
||||||
|
/// (header::COOKIE, HeaderValue::from_static("foo=1")),
|
||||||
|
/// (header::COOKIE, HeaderValue::from_static("bar=1")),
|
||||||
|
/// ]);
|
||||||
|
///
|
||||||
|
/// assert_eq!(headers.len(), 3);
|
||||||
|
/// ```
|
||||||
#[derive(Debug, Clone, Default)]
|
#[derive(Debug, Clone, Default)]
|
||||||
pub struct HeaderMap {
|
pub struct HeaderMap {
|
||||||
pub(crate) inner: AHashMap<HeaderName, Value>,
|
pub(crate) inner: AHashMap<HeaderName, Value>,
|
||||||
|
@ -368,8 +384,8 @@ impl HeaderMap {
|
||||||
/// let removed = map.insert(header::ACCEPT, HeaderValue::from_static("text/html"));
|
/// let removed = map.insert(header::ACCEPT, HeaderValue::from_static("text/html"));
|
||||||
/// assert!(!removed.is_empty());
|
/// assert!(!removed.is_empty());
|
||||||
/// ```
|
/// ```
|
||||||
pub fn insert(&mut self, key: HeaderName, val: HeaderValue) -> Removed {
|
pub fn insert(&mut self, name: HeaderName, val: HeaderValue) -> Removed {
|
||||||
let value = self.inner.insert(key, Value::one(val));
|
let value = self.inner.insert(name, Value::one(val));
|
||||||
Removed::new(value)
|
Removed::new(value)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -636,6 +652,16 @@ impl<'a> IntoIterator for &'a HeaderMap {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl FromIterator<(HeaderName, HeaderValue)> for HeaderMap {
|
||||||
|
fn from_iter<T: IntoIterator<Item = (HeaderName, HeaderValue)>>(iter: T) -> Self {
|
||||||
|
iter.into_iter()
|
||||||
|
.fold(Self::new(), |mut map, (name, value)| {
|
||||||
|
map.append(name, value);
|
||||||
|
map
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Convert a `http::HeaderMap` to our `HeaderMap`.
|
/// Convert a `http::HeaderMap` to our `HeaderMap`.
|
||||||
impl From<http::HeaderMap> for HeaderMap {
|
impl From<http::HeaderMap> for HeaderMap {
|
||||||
fn from(mut map: http::HeaderMap) -> Self {
|
fn from(mut map: http::HeaderMap) -> Self {
|
||||||
|
|
|
@ -42,9 +42,9 @@ pub use self::{
|
||||||
as_name::AsHeaderName,
|
as_name::AsHeaderName,
|
||||||
// re-export list is explicit so that any updates to `http` do not conflict with this set
|
// re-export list is explicit so that any updates to `http` do not conflict with this set
|
||||||
common::{
|
common::{
|
||||||
CACHE_STATUS, CDN_CACHE_CONTROL, CROSS_ORIGIN_EMBEDDER_POLICY, CROSS_ORIGIN_OPENER_POLICY,
|
CACHE_STATUS, CDN_CACHE_CONTROL, CLEAR_SITE_DATA, CROSS_ORIGIN_EMBEDDER_POLICY,
|
||||||
CROSS_ORIGIN_RESOURCE_POLICY, PERMISSIONS_POLICY, X_FORWARDED_FOR, X_FORWARDED_HOST,
|
CROSS_ORIGIN_OPENER_POLICY, CROSS_ORIGIN_RESOURCE_POLICY, PERMISSIONS_POLICY,
|
||||||
X_FORWARDED_PROTO,
|
X_FORWARDED_FOR, X_FORWARDED_HOST, X_FORWARDED_PROTO,
|
||||||
},
|
},
|
||||||
into_pair::TryIntoHeaderPair,
|
into_pair::TryIntoHeaderPair,
|
||||||
into_value::TryIntoHeaderValue,
|
into_value::TryIntoHeaderValue,
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
use std::str::FromStr;
|
use std::str::FromStr;
|
||||||
|
|
||||||
use derive_more::{Display, Error};
|
use derive_more::derive::{Display, Error};
|
||||||
use http::header::InvalidHeaderValue;
|
use http::header::InvalidHeaderValue;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
|
@ -11,7 +11,7 @@ use crate::{
|
||||||
|
|
||||||
/// Error returned when a content encoding is unknown.
|
/// Error returned when a content encoding is unknown.
|
||||||
#[derive(Debug, Display, Error)]
|
#[derive(Debug, Display, Error)]
|
||||||
#[display(fmt = "unsupported content encoding")]
|
#[display("unsupported content encoding")]
|
||||||
pub struct ContentEncodingParseError;
|
pub struct ContentEncodingParseError;
|
||||||
|
|
||||||
/// Represents a supported content encoding.
|
/// Represents a supported content encoding.
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
use std::fmt;
|
use std::fmt;
|
||||||
|
|
||||||
use derive_more::{Display, Error};
|
use derive_more::derive::{Display, Error};
|
||||||
|
|
||||||
const MAX_QUALITY_INT: u16 = 1000;
|
const MAX_QUALITY_INT: u16 = 1000;
|
||||||
const MAX_QUALITY_FLOAT: f32 = 1.0;
|
const MAX_QUALITY_FLOAT: f32 = 1.0;
|
||||||
|
@ -125,7 +125,7 @@ pub fn itoa_fmt<W: fmt::Write, V: itoa::Integer>(mut wr: W, value: V) -> fmt::Re
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Display, Error)]
|
#[derive(Debug, Clone, Display, Error)]
|
||||||
#[display(fmt = "quality out of bounds")]
|
#[display("quality out of bounds")]
|
||||||
#[non_exhaustive]
|
#[non_exhaustive]
|
||||||
pub struct QualityOutOfBounds;
|
pub struct QualityOutOfBounds;
|
||||||
|
|
||||||
|
|
|
@ -6,10 +6,10 @@
|
||||||
//! | ------------------- | ------------------------------------------- |
|
//! | ------------------- | ------------------------------------------- |
|
||||||
//! | `http2` | HTTP/2 support via [h2]. |
|
//! | `http2` | HTTP/2 support via [h2]. |
|
||||||
//! | `openssl` | TLS support via [OpenSSL]. |
|
//! | `openssl` | TLS support via [OpenSSL]. |
|
||||||
//! | `rustls` | TLS support via [rustls] 0.20. |
|
//! | `rustls-0_20` | TLS support via rustls 0.20. |
|
||||||
//! | `rustls-0_21` | TLS support via [rustls] 0.21. |
|
//! | `rustls-0_21` | TLS support via rustls 0.21. |
|
||||||
//! | `rustls-0_22` | TLS support via [rustls] 0.22. |
|
//! | `rustls-0_22` | TLS support via rustls 0.22. |
|
||||||
//! | `rustls-0_23` | TLS support via [rustls] 0.23. |
|
//! | `rustls-0_23` | TLS support via [rustls] 0.23. |
|
||||||
//! | `compress-brotli` | Payload compression support: Brotli. |
|
//! | `compress-brotli` | Payload compression support: Brotli. |
|
||||||
//! | `compress-gzip` | Payload compression support: Deflate, Gzip. |
|
//! | `compress-gzip` | Payload compression support: Deflate, Gzip. |
|
||||||
//! | `compress-zstd` | Payload compression support: Zstd. |
|
//! | `compress-zstd` | Payload compression support: Zstd. |
|
||||||
|
@ -20,8 +20,6 @@
|
||||||
//! [rustls]: https://crates.io/crates/rustls
|
//! [rustls]: https://crates.io/crates/rustls
|
||||||
//! [trust-dns]: https://crates.io/crates/trust-dns
|
//! [trust-dns]: https://crates.io/crates/trust-dns
|
||||||
|
|
||||||
#![deny(rust_2018_idioms, nonstandard_style)]
|
|
||||||
#![warn(future_incompatible)]
|
|
||||||
#![allow(
|
#![allow(
|
||||||
clippy::type_complexity,
|
clippy::type_complexity,
|
||||||
clippy::too_many_arguments,
|
clippy::too_many_arguments,
|
||||||
|
@ -62,13 +60,7 @@ pub mod ws;
|
||||||
|
|
||||||
#[allow(deprecated)]
|
#[allow(deprecated)]
|
||||||
pub use self::payload::PayloadStream;
|
pub use self::payload::PayloadStream;
|
||||||
#[cfg(any(
|
#[cfg(feature = "__tls")]
|
||||||
feature = "openssl",
|
|
||||||
feature = "rustls-0_20",
|
|
||||||
feature = "rustls-0_21",
|
|
||||||
feature = "rustls-0_22",
|
|
||||||
feature = "rustls-0_23",
|
|
||||||
))]
|
|
||||||
pub use self::service::TlsAcceptorConfig;
|
pub use self::service::TlsAcceptorConfig;
|
||||||
pub use self::{
|
pub use self::{
|
||||||
builder::HttpServiceBuilder,
|
builder::HttpServiceBuilder,
|
||||||
|
|
|
@ -66,7 +66,7 @@ impl<T: Head> ops::DerefMut for Message<T> {
|
||||||
|
|
||||||
impl<T: Head> Drop for Message<T> {
|
impl<T: Head> Drop for Message<T> {
|
||||||
fn drop(&mut self) {
|
fn drop(&mut self) {
|
||||||
T::with_pool(|p| p.release(self.head.clone()))
|
T::with_pool(|p| p.release(Rc::clone(&self.head)))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -351,12 +351,9 @@ mod tests {
|
||||||
assert_eq!(resp.headers().get(CONTENT_TYPE).unwrap(), "text/plain");
|
assert_eq!(resp.headers().get(CONTENT_TYPE).unwrap(), "text/plain");
|
||||||
|
|
||||||
let resp = Response::build(StatusCode::OK)
|
let resp = Response::build(StatusCode::OK)
|
||||||
.content_type(mime::APPLICATION_JAVASCRIPT_UTF_8)
|
.content_type(mime::TEXT_JAVASCRIPT)
|
||||||
.body(Bytes::new());
|
.body(Bytes::new());
|
||||||
assert_eq!(
|
assert_eq!(resp.headers().get(CONTENT_TYPE).unwrap(), "text/javascript");
|
||||||
resp.headers().get(CONTENT_TYPE).unwrap(),
|
|
||||||
"application/javascript; charset=utf-8"
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
|
|
@ -241,25 +241,13 @@ where
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Configuration options used when accepting TLS connection.
|
/// Configuration options used when accepting TLS connection.
|
||||||
#[cfg(any(
|
#[cfg(feature = "__tls")]
|
||||||
feature = "openssl",
|
|
||||||
feature = "rustls-0_20",
|
|
||||||
feature = "rustls-0_21",
|
|
||||||
feature = "rustls-0_22",
|
|
||||||
feature = "rustls-0_23",
|
|
||||||
))]
|
|
||||||
#[derive(Debug, Default)]
|
#[derive(Debug, Default)]
|
||||||
pub struct TlsAcceptorConfig {
|
pub struct TlsAcceptorConfig {
|
||||||
pub(crate) handshake_timeout: Option<std::time::Duration>,
|
pub(crate) handshake_timeout: Option<std::time::Duration>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(any(
|
#[cfg(feature = "__tls")]
|
||||||
feature = "openssl",
|
|
||||||
feature = "rustls-0_20",
|
|
||||||
feature = "rustls-0_21",
|
|
||||||
feature = "rustls-0_22",
|
|
||||||
feature = "rustls-0_23",
|
|
||||||
))]
|
|
||||||
impl TlsAcceptorConfig {
|
impl TlsAcceptorConfig {
|
||||||
/// Set TLS handshake timeout duration.
|
/// Set TLS handshake timeout duration.
|
||||||
pub fn handshake_timeout(self, dur: std::time::Duration) -> Self {
|
pub fn handshake_timeout(self, dur: std::time::Duration) -> Self {
|
||||||
|
@ -787,23 +775,23 @@ where
|
||||||
let cfg = self.cfg.clone();
|
let cfg = self.cfg.clone();
|
||||||
|
|
||||||
Box::pin(async move {
|
Box::pin(async move {
|
||||||
let expect = expect
|
let expect = expect.await.map_err(|err| {
|
||||||
.await
|
tracing::error!("Initialization of HTTP expect service error: {err:?}");
|
||||||
.map_err(|e| error!("Init http expect service error: {:?}", e))?;
|
})?;
|
||||||
|
|
||||||
let upgrade = match upgrade {
|
let upgrade = match upgrade {
|
||||||
Some(upgrade) => {
|
Some(upgrade) => {
|
||||||
let upgrade = upgrade
|
let upgrade = upgrade.await.map_err(|err| {
|
||||||
.await
|
tracing::error!("Initialization of HTTP upgrade service error: {err:?}");
|
||||||
.map_err(|e| error!("Init http upgrade service error: {:?}", e))?;
|
})?;
|
||||||
Some(upgrade)
|
Some(upgrade)
|
||||||
}
|
}
|
||||||
None => None,
|
None => None,
|
||||||
};
|
};
|
||||||
|
|
||||||
let service = service
|
let service = service.await.map_err(|err| {
|
||||||
.await
|
tracing::error!("Initialization of HTTP service error: {err:?}");
|
||||||
.map_err(|e| error!("Init http service error: {:?}", e))?;
|
})?;
|
||||||
|
|
||||||
Ok(HttpServiceHandler::new(
|
Ok(HttpServiceHandler::new(
|
||||||
cfg,
|
cfg,
|
||||||
|
@ -922,7 +910,7 @@ where
|
||||||
handshake: Some((
|
handshake: Some((
|
||||||
crate::h2::handshake_with_timeout(io, &self.cfg),
|
crate::h2::handshake_with_timeout(io, &self.cfg),
|
||||||
self.cfg.clone(),
|
self.cfg.clone(),
|
||||||
self.flow.clone(),
|
Rc::clone(&self.flow),
|
||||||
conn_data,
|
conn_data,
|
||||||
peer_addr,
|
peer_addr,
|
||||||
)),
|
)),
|
||||||
|
@ -938,7 +926,7 @@ where
|
||||||
state: State::H1 {
|
state: State::H1 {
|
||||||
dispatcher: h1::Dispatcher::new(
|
dispatcher: h1::Dispatcher::new(
|
||||||
io,
|
io,
|
||||||
self.flow.clone(),
|
Rc::clone(&self.flow),
|
||||||
self.cfg.clone(),
|
self.cfg.clone(),
|
||||||
peer_addr,
|
peer_addr,
|
||||||
conn_data,
|
conn_data,
|
||||||
|
|
|
@ -159,8 +159,8 @@ impl TestBuffer {
|
||||||
#[allow(dead_code)]
|
#[allow(dead_code)]
|
||||||
pub(crate) fn clone(&self) -> Self {
|
pub(crate) fn clone(&self) -> Self {
|
||||||
Self {
|
Self {
|
||||||
read_buf: self.read_buf.clone(),
|
read_buf: Rc::clone(&self.read_buf),
|
||||||
write_buf: self.write_buf.clone(),
|
write_buf: Rc::clone(&self.write_buf),
|
||||||
err: self.err.clone(),
|
err: self.err.clone(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -114,14 +114,14 @@ mod inner {
|
||||||
{
|
{
|
||||||
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
|
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
match *self {
|
match *self {
|
||||||
DispatcherError::Service(ref e) => {
|
DispatcherError::Service(ref err) => {
|
||||||
write!(fmt, "DispatcherError::Service({:?})", e)
|
write!(fmt, "DispatcherError::Service({err:?})")
|
||||||
}
|
}
|
||||||
DispatcherError::Encoder(ref e) => {
|
DispatcherError::Encoder(ref err) => {
|
||||||
write!(fmt, "DispatcherError::Encoder({:?})", e)
|
write!(fmt, "DispatcherError::Encoder({err:?})")
|
||||||
}
|
}
|
||||||
DispatcherError::Decoder(ref e) => {
|
DispatcherError::Decoder(ref err) => {
|
||||||
write!(fmt, "DispatcherError::Decoder({:?})", e)
|
write!(fmt, "DispatcherError::Decoder({err:?})")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -136,9 +136,9 @@ mod inner {
|
||||||
{
|
{
|
||||||
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
|
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
match *self {
|
match *self {
|
||||||
DispatcherError::Service(ref e) => write!(fmt, "{}", e),
|
DispatcherError::Service(ref err) => write!(fmt, "{err}"),
|
||||||
DispatcherError::Encoder(ref e) => write!(fmt, "{:?}", e),
|
DispatcherError::Encoder(ref err) => write!(fmt, "{err:?}"),
|
||||||
DispatcherError::Decoder(ref e) => write!(fmt, "{:?}", e),
|
DispatcherError::Decoder(ref err) => write!(fmt, "{err:?}"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -178,17 +178,14 @@ impl Parser {
|
||||||
};
|
};
|
||||||
|
|
||||||
if payload_len < 126 {
|
if payload_len < 126 {
|
||||||
dst.buffer_mut()
|
dst.buffer_mut().reserve(p_len + 2);
|
||||||
.reserve(p_len + 2 + if mask { 4 } else { 0 });
|
|
||||||
dst.buffer_mut().put_slice(&[one, two | payload_len as u8]);
|
dst.buffer_mut().put_slice(&[one, two | payload_len as u8]);
|
||||||
} else if payload_len <= 65_535 {
|
} else if payload_len <= 65_535 {
|
||||||
dst.buffer_mut()
|
dst.buffer_mut().reserve(p_len + 4);
|
||||||
.reserve(p_len + 4 + if mask { 4 } else { 0 });
|
|
||||||
dst.buffer_mut().put_slice(&[one, two | 126]);
|
dst.buffer_mut().put_slice(&[one, two | 126]);
|
||||||
dst.buffer_mut().put_u16(payload_len as u16);
|
dst.buffer_mut().put_u16(payload_len as u16);
|
||||||
} else {
|
} else {
|
||||||
dst.buffer_mut()
|
dst.buffer_mut().reserve(p_len + 10);
|
||||||
.reserve(p_len + 10 + if mask { 4 } else { 0 });
|
|
||||||
dst.buffer_mut().put_slice(&[one, two | 127]);
|
dst.buffer_mut().put_slice(&[one, two | 127]);
|
||||||
dst.buffer_mut().put_u64(payload_len as u64);
|
dst.buffer_mut().put_u64(payload_len as u64);
|
||||||
};
|
};
|
||||||
|
|
|
@ -5,7 +5,7 @@
|
||||||
|
|
||||||
use std::io;
|
use std::io;
|
||||||
|
|
||||||
use derive_more::{Display, Error, From};
|
use derive_more::derive::{Display, Error, From};
|
||||||
use http::{header, Method, StatusCode};
|
use http::{header, Method, StatusCode};
|
||||||
|
|
||||||
use crate::{body::BoxBody, header::HeaderValue, RequestHead, Response, ResponseBuilder};
|
use crate::{body::BoxBody, header::HeaderValue, RequestHead, Response, ResponseBuilder};
|
||||||
|
@ -27,43 +27,43 @@ pub use self::{
|
||||||
#[derive(Debug, Display, Error, From)]
|
#[derive(Debug, Display, Error, From)]
|
||||||
pub enum ProtocolError {
|
pub enum ProtocolError {
|
||||||
/// Received an unmasked frame from client.
|
/// Received an unmasked frame from client.
|
||||||
#[display(fmt = "received an unmasked frame from client")]
|
#[display("received an unmasked frame from client")]
|
||||||
UnmaskedFrame,
|
UnmaskedFrame,
|
||||||
|
|
||||||
/// Received a masked frame from server.
|
/// Received a masked frame from server.
|
||||||
#[display(fmt = "received a masked frame from server")]
|
#[display("received a masked frame from server")]
|
||||||
MaskedFrame,
|
MaskedFrame,
|
||||||
|
|
||||||
/// Encountered invalid opcode.
|
/// Encountered invalid opcode.
|
||||||
#[display(fmt = "invalid opcode ({})", _0)]
|
#[display("invalid opcode ({})", _0)]
|
||||||
InvalidOpcode(#[error(not(source))] u8),
|
InvalidOpcode(#[error(not(source))] u8),
|
||||||
|
|
||||||
/// Invalid control frame length
|
/// Invalid control frame length
|
||||||
#[display(fmt = "invalid control frame length ({})", _0)]
|
#[display("invalid control frame length ({})", _0)]
|
||||||
InvalidLength(#[error(not(source))] usize),
|
InvalidLength(#[error(not(source))] usize),
|
||||||
|
|
||||||
/// Bad opcode.
|
/// Bad opcode.
|
||||||
#[display(fmt = "bad opcode")]
|
#[display("bad opcode")]
|
||||||
BadOpCode,
|
BadOpCode,
|
||||||
|
|
||||||
/// A payload reached size limit.
|
/// A payload reached size limit.
|
||||||
#[display(fmt = "payload reached size limit")]
|
#[display("payload reached size limit")]
|
||||||
Overflow,
|
Overflow,
|
||||||
|
|
||||||
/// Continuation has not started.
|
/// Continuation has not started.
|
||||||
#[display(fmt = "continuation has not started")]
|
#[display("continuation has not started")]
|
||||||
ContinuationNotStarted,
|
ContinuationNotStarted,
|
||||||
|
|
||||||
/// Received new continuation but it is already started.
|
/// Received new continuation but it is already started.
|
||||||
#[display(fmt = "received new continuation but it has already started")]
|
#[display("received new continuation but it has already started")]
|
||||||
ContinuationStarted,
|
ContinuationStarted,
|
||||||
|
|
||||||
/// Unknown continuation fragment.
|
/// Unknown continuation fragment.
|
||||||
#[display(fmt = "unknown continuation fragment: {}", _0)]
|
#[display("unknown continuation fragment: {}", _0)]
|
||||||
ContinuationFragment(#[error(not(source))] OpCode),
|
ContinuationFragment(#[error(not(source))] OpCode),
|
||||||
|
|
||||||
/// I/O error.
|
/// I/O error.
|
||||||
#[display(fmt = "I/O error: {}", _0)]
|
#[display("I/O error: {}", _0)]
|
||||||
Io(io::Error),
|
Io(io::Error),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -71,27 +71,27 @@ pub enum ProtocolError {
|
||||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Display, Error)]
|
#[derive(Debug, Clone, Copy, PartialEq, Eq, Display, Error)]
|
||||||
pub enum HandshakeError {
|
pub enum HandshakeError {
|
||||||
/// Only get method is allowed.
|
/// Only get method is allowed.
|
||||||
#[display(fmt = "method not allowed")]
|
#[display("method not allowed")]
|
||||||
GetMethodRequired,
|
GetMethodRequired,
|
||||||
|
|
||||||
/// Upgrade header if not set to WebSocket.
|
/// Upgrade header if not set to WebSocket.
|
||||||
#[display(fmt = "WebSocket upgrade is expected")]
|
#[display("WebSocket upgrade is expected")]
|
||||||
NoWebsocketUpgrade,
|
NoWebsocketUpgrade,
|
||||||
|
|
||||||
/// Connection header is not set to upgrade.
|
/// Connection header is not set to upgrade.
|
||||||
#[display(fmt = "connection upgrade is expected")]
|
#[display("connection upgrade is expected")]
|
||||||
NoConnectionUpgrade,
|
NoConnectionUpgrade,
|
||||||
|
|
||||||
/// WebSocket version header is not set.
|
/// WebSocket version header is not set.
|
||||||
#[display(fmt = "WebSocket version header is required")]
|
#[display("WebSocket version header is required")]
|
||||||
NoVersionHeader,
|
NoVersionHeader,
|
||||||
|
|
||||||
/// Unsupported WebSocket version.
|
/// Unsupported WebSocket version.
|
||||||
#[display(fmt = "unsupported WebSocket version")]
|
#[display("unsupported WebSocket version")]
|
||||||
UnsupportedVersion,
|
UnsupportedVersion,
|
||||||
|
|
||||||
/// WebSocket key is not set or wrong.
|
/// WebSocket key is not set or wrong.
|
||||||
#[display(fmt = "unknown WebSocket key")]
|
#[display("unknown WebSocket key")]
|
||||||
BadWebsocketKey,
|
BadWebsocketKey,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -5,7 +5,7 @@ use actix_http_test::test_server;
|
||||||
use actix_service::ServiceFactoryExt;
|
use actix_service::ServiceFactoryExt;
|
||||||
use actix_utils::future;
|
use actix_utils::future;
|
||||||
use bytes::Bytes;
|
use bytes::Bytes;
|
||||||
use derive_more::{Display, Error};
|
use derive_more::derive::{Display, Error};
|
||||||
use futures_util::StreamExt as _;
|
use futures_util::StreamExt as _;
|
||||||
|
|
||||||
const STR: &str = "Hello World Hello World Hello World Hello World Hello World \
|
const STR: &str = "Hello World Hello World Hello World Hello World Hello World \
|
||||||
|
@ -94,7 +94,7 @@ async fn with_query_parameter() {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Display, Error)]
|
#[derive(Debug, Display, Error)]
|
||||||
#[display(fmt = "expect failed")]
|
#[display("expect failed")]
|
||||||
struct ExpectFailed;
|
struct ExpectFailed;
|
||||||
|
|
||||||
impl From<ExpectFailed> for Response<BoxBody> {
|
impl From<ExpectFailed> for Response<BoxBody> {
|
||||||
|
|
|
@ -14,7 +14,7 @@ use actix_http_test::test_server;
|
||||||
use actix_service::{fn_service, ServiceFactoryExt};
|
use actix_service::{fn_service, ServiceFactoryExt};
|
||||||
use actix_utils::future::{err, ok, ready};
|
use actix_utils::future::{err, ok, ready};
|
||||||
use bytes::{Bytes, BytesMut};
|
use bytes::{Bytes, BytesMut};
|
||||||
use derive_more::{Display, Error};
|
use derive_more::derive::{Display, Error};
|
||||||
use futures_core::Stream;
|
use futures_core::Stream;
|
||||||
use futures_util::{stream::once, StreamExt as _};
|
use futures_util::{stream::once, StreamExt as _};
|
||||||
use openssl::{
|
use openssl::{
|
||||||
|
@ -398,7 +398,7 @@ async fn h2_response_http_error_handling() {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Display, Error)]
|
#[derive(Debug, Display, Error)]
|
||||||
#[display(fmt = "error")]
|
#[display("error")]
|
||||||
struct BadRequest;
|
struct BadRequest;
|
||||||
|
|
||||||
impl From<BadRequest> for Response<BoxBody> {
|
impl From<BadRequest> for Response<BoxBody> {
|
||||||
|
|
|
@ -23,7 +23,7 @@ use actix_service::{fn_factory_with_config, fn_service};
|
||||||
use actix_tls::connect::rustls_0_23::webpki_roots_cert_store;
|
use actix_tls::connect::rustls_0_23::webpki_roots_cert_store;
|
||||||
use actix_utils::future::{err, ok, poll_fn};
|
use actix_utils::future::{err, ok, poll_fn};
|
||||||
use bytes::{Bytes, BytesMut};
|
use bytes::{Bytes, BytesMut};
|
||||||
use derive_more::{Display, Error};
|
use derive_more::derive::{Display, Error};
|
||||||
use futures_core::{ready, Stream};
|
use futures_core::{ready, Stream};
|
||||||
use futures_util::stream::once;
|
use futures_util::stream::once;
|
||||||
use rustls::{pki_types::ServerName, ServerConfig as RustlsServerConfig};
|
use rustls::{pki_types::ServerName, ServerConfig as RustlsServerConfig};
|
||||||
|
@ -480,7 +480,7 @@ async fn h2_response_http_error_handling() {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Display, Error)]
|
#[derive(Debug, Display, Error)]
|
||||||
#[display(fmt = "error")]
|
#[display("error")]
|
||||||
struct BadRequest;
|
struct BadRequest;
|
||||||
|
|
||||||
impl From<BadRequest> for Response<BoxBody> {
|
impl From<BadRequest> for Response<BoxBody> {
|
||||||
|
|
|
@ -14,7 +14,7 @@ use actix_rt::{net::TcpStream, time::sleep};
|
||||||
use actix_service::fn_service;
|
use actix_service::fn_service;
|
||||||
use actix_utils::future::{err, ok, ready};
|
use actix_utils::future::{err, ok, ready};
|
||||||
use bytes::Bytes;
|
use bytes::Bytes;
|
||||||
use derive_more::{Display, Error};
|
use derive_more::derive::{Display, Error};
|
||||||
use futures_util::{stream::once, FutureExt as _, StreamExt as _};
|
use futures_util::{stream::once, FutureExt as _, StreamExt as _};
|
||||||
use regex::Regex;
|
use regex::Regex;
|
||||||
|
|
||||||
|
@ -62,7 +62,7 @@ async fn h1_2() {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Display, Error)]
|
#[derive(Debug, Display, Error)]
|
||||||
#[display(fmt = "expect failed")]
|
#[display("expect failed")]
|
||||||
struct ExpectFailed;
|
struct ExpectFailed;
|
||||||
|
|
||||||
impl From<ExpectFailed> for Response<BoxBody> {
|
impl From<ExpectFailed> for Response<BoxBody> {
|
||||||
|
@ -723,7 +723,7 @@ async fn h1_response_http_error_handling() {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Display, Error)]
|
#[derive(Debug, Display, Error)]
|
||||||
#[display(fmt = "error")]
|
#[display("error")]
|
||||||
struct BadRequest;
|
struct BadRequest;
|
||||||
|
|
||||||
impl From<BadRequest> for Response<BoxBody> {
|
impl From<BadRequest> for Response<BoxBody> {
|
||||||
|
|
|
@ -14,7 +14,7 @@ use actix_http::{
|
||||||
use actix_http_test::test_server;
|
use actix_http_test::test_server;
|
||||||
use actix_service::{fn_factory, Service};
|
use actix_service::{fn_factory, Service};
|
||||||
use bytes::Bytes;
|
use bytes::Bytes;
|
||||||
use derive_more::{Display, Error, From};
|
use derive_more::derive::{Display, Error, From};
|
||||||
use futures_core::future::LocalBoxFuture;
|
use futures_core::future::LocalBoxFuture;
|
||||||
use futures_util::{SinkExt as _, StreamExt as _};
|
use futures_util::{SinkExt as _, StreamExt as _};
|
||||||
|
|
||||||
|
@ -37,16 +37,16 @@ impl WsService {
|
||||||
|
|
||||||
#[derive(Debug, Display, Error, From)]
|
#[derive(Debug, Display, Error, From)]
|
||||||
enum WsServiceError {
|
enum WsServiceError {
|
||||||
#[display(fmt = "HTTP error")]
|
#[display("HTTP error")]
|
||||||
Http(actix_http::Error),
|
Http(actix_http::Error),
|
||||||
|
|
||||||
#[display(fmt = "WS handshake error")]
|
#[display("WS handshake error")]
|
||||||
Ws(actix_http::ws::HandshakeError),
|
Ws(actix_http::ws::HandshakeError),
|
||||||
|
|
||||||
#[display(fmt = "I/O error")]
|
#[display("I/O error")]
|
||||||
Io(std::io::Error),
|
Io(std::io::Error),
|
||||||
|
|
||||||
#[display(fmt = "dispatcher error")]
|
#[display("dispatcher error")]
|
||||||
Dispatcher,
|
Dispatcher,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -2,6 +2,8 @@
|
||||||
|
|
||||||
## Unreleased
|
## Unreleased
|
||||||
|
|
||||||
|
## 0.7.0
|
||||||
|
|
||||||
- Minimum supported Rust version (MSRV) is now 1.72.
|
- Minimum supported Rust version (MSRV) is now 1.72.
|
||||||
|
|
||||||
## 0.6.1
|
## 0.6.1
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
[package]
|
[package]
|
||||||
name = "actix-multipart-derive"
|
name = "actix-multipart-derive"
|
||||||
version = "0.6.1"
|
version = "0.7.0"
|
||||||
authors = ["Jacob Halsey <jacob@jhalsey.com>"]
|
authors = ["Jacob Halsey <jacob@jhalsey.com>"]
|
||||||
description = "Multipart form derive macro for Actix Web"
|
description = "Multipart form derive macro for Actix Web"
|
||||||
keywords = ["http", "web", "framework", "async", "futures"]
|
keywords = ["http", "web", "framework", "async", "futures"]
|
||||||
|
@ -25,7 +25,10 @@ quote = "1"
|
||||||
syn = "2"
|
syn = "2"
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
actix-multipart = "0.6"
|
actix-multipart = "0.7"
|
||||||
actix-web = "4"
|
actix-web = "4"
|
||||||
rustversion = "1"
|
rustversion = "1"
|
||||||
trybuild = "1"
|
trybuild = "1"
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
|
@ -5,11 +5,11 @@
|
||||||
<!-- prettier-ignore-start -->
|
<!-- prettier-ignore-start -->
|
||||||
|
|
||||||
[![crates.io](https://img.shields.io/crates/v/actix-multipart-derive?label=latest)](https://crates.io/crates/actix-multipart-derive)
|
[![crates.io](https://img.shields.io/crates/v/actix-multipart-derive?label=latest)](https://crates.io/crates/actix-multipart-derive)
|
||||||
[![Documentation](https://docs.rs/actix-multipart-derive/badge.svg?version=0.6.1)](https://docs.rs/actix-multipart-derive/0.6.1)
|
[![Documentation](https://docs.rs/actix-multipart-derive/badge.svg?version=0.7.0)](https://docs.rs/actix-multipart-derive/0.7.0)
|
||||||
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
|
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
|
||||||
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-multipart-derive.svg)
|
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-multipart-derive.svg)
|
||||||
<br />
|
<br />
|
||||||
[![dependency status](https://deps.rs/crate/actix-multipart-derive/0.6.1/status.svg)](https://deps.rs/crate/actix-multipart-derive/0.6.1)
|
[![dependency status](https://deps.rs/crate/actix-multipart-derive/0.7.0/status.svg)](https://deps.rs/crate/actix-multipart-derive/0.7.0)
|
||||||
[![Download](https://img.shields.io/crates/d/actix-multipart-derive.svg)](https://crates.io/crates/actix-multipart-derive)
|
[![Download](https://img.shields.io/crates/d/actix-multipart-derive.svg)](https://crates.io/crates/actix-multipart-derive)
|
||||||
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
|
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
|
||||||
|
|
||||||
|
|
|
@ -2,11 +2,10 @@
|
||||||
//!
|
//!
|
||||||
//! See [`macro@MultipartForm`] for usage examples.
|
//! See [`macro@MultipartForm`] for usage examples.
|
||||||
|
|
||||||
#![deny(rust_2018_idioms, nonstandard_style)]
|
|
||||||
#![warn(future_incompatible)]
|
|
||||||
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
||||||
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
||||||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||||
|
#![allow(clippy::disallowed_names)] // false positives in some macro expansions
|
||||||
|
|
||||||
use std::collections::HashSet;
|
use std::collections::HashSet;
|
||||||
|
|
||||||
|
@ -37,6 +36,7 @@ struct MultipartFormAttrs {
|
||||||
duplicate_field: DuplicateField,
|
duplicate_field: DuplicateField,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::disallowed_names)] // false positive in macro expansion
|
||||||
#[derive(FromField, Default)]
|
#[derive(FromField, Default)]
|
||||||
#[darling(attributes(multipart), default)]
|
#[darling(attributes(multipart), default)]
|
||||||
struct FieldAttrs {
|
struct FieldAttrs {
|
||||||
|
@ -138,7 +138,7 @@ struct ParsedField<'t> {
|
||||||
/// `#[multipart(duplicate_field = "<behavior>")]` attribute:
|
/// `#[multipart(duplicate_field = "<behavior>")]` attribute:
|
||||||
///
|
///
|
||||||
/// - "ignore": (default) Extra fields are ignored. I.e., the first one is persisted.
|
/// - "ignore": (default) Extra fields are ignored. I.e., the first one is persisted.
|
||||||
/// - "deny": A `MultipartError::UnsupportedField` error response is returned.
|
/// - "deny": A `MultipartError::UnknownField` error response is returned.
|
||||||
/// - "replace": Each field is processed, but only the last one is persisted.
|
/// - "replace": Each field is processed, but only the last one is persisted.
|
||||||
///
|
///
|
||||||
/// Note that `Vec` fields will ignore this option.
|
/// Note that `Vec` fields will ignore this option.
|
||||||
|
@ -229,7 +229,7 @@ pub fn impl_multipart_form(input: proc_macro::TokenStream) -> proc_macro::TokenS
|
||||||
// Return value when a field name is not supported by the form
|
// Return value when a field name is not supported by the form
|
||||||
let unknown_field_result = if attrs.deny_unknown_fields {
|
let unknown_field_result = if attrs.deny_unknown_fields {
|
||||||
quote!(::std::result::Result::Err(
|
quote!(::std::result::Result::Err(
|
||||||
::actix_multipart::MultipartError::UnsupportedField(field.name().to_string())
|
::actix_multipart::MultipartError::UnknownField(field.name().unwrap().to_string())
|
||||||
))
|
))
|
||||||
} else {
|
} else {
|
||||||
quote!(::std::result::Result::Ok(()))
|
quote!(::std::result::Result::Ok(()))
|
||||||
|
@ -292,7 +292,7 @@ pub fn impl_multipart_form(input: proc_macro::TokenStream) -> proc_macro::TokenS
|
||||||
limits: &'t mut ::actix_multipart::form::Limits,
|
limits: &'t mut ::actix_multipart::form::Limits,
|
||||||
state: &'t mut ::actix_multipart::form::State,
|
state: &'t mut ::actix_multipart::form::State,
|
||||||
) -> ::std::pin::Pin<::std::boxed::Box<dyn ::std::future::Future<Output = ::std::result::Result<(), ::actix_multipart::MultipartError>> + 't>> {
|
) -> ::std::pin::Pin<::std::boxed::Box<dyn ::std::future::Future<Output = ::std::result::Result<(), ::actix_multipart::MultipartError>> + 't>> {
|
||||||
match field.name() {
|
match field.name().unwrap() {
|
||||||
#handle_field_impl
|
#handle_field_impl
|
||||||
_ => return ::std::boxed::Box::pin(::std::future::ready(#unknown_field_result)),
|
_ => return ::std::boxed::Box::pin(::std::future::ready(#unknown_field_result)),
|
||||||
}
|
}
|
||||||
|
|
|
@ -2,6 +2,30 @@
|
||||||
|
|
||||||
## Unreleased
|
## Unreleased
|
||||||
|
|
||||||
|
- Minimum supported Rust version (MSRV) is now 1.75.
|
||||||
|
|
||||||
|
## 0.7.2
|
||||||
|
|
||||||
|
- Fix re-exported version of `actix-multipart-derive`.
|
||||||
|
|
||||||
|
## 0.7.1
|
||||||
|
|
||||||
|
- Expose `LimitExceeded` error type.
|
||||||
|
|
||||||
|
## 0.7.0
|
||||||
|
|
||||||
|
- Add `MultipartError::ContentTypeIncompatible` variant.
|
||||||
|
- Add `MultipartError::ContentDispositionNameMissing` variant.
|
||||||
|
- Add `Field::bytes()` method.
|
||||||
|
- Rename `MultipartError::{NoContentDisposition => ContentDispositionMissing}` variant.
|
||||||
|
- Rename `MultipartError::{NoContentType => ContentTypeMissing}` variant.
|
||||||
|
- Rename `MultipartError::{ParseContentType => ContentTypeParse}` variant.
|
||||||
|
- Rename `MultipartError::{Boundary => BoundaryMissing}` variant.
|
||||||
|
- Rename `MultipartError::{UnsupportedField => UnknownField}` variant.
|
||||||
|
- Remove top-level re-exports of `test` utilities.
|
||||||
|
|
||||||
|
## 0.6.2
|
||||||
|
|
||||||
- Add testing utilities under new module `test`.
|
- Add testing utilities under new module `test`.
|
||||||
- Minimum supported Rust version (MSRV) is now 1.72.
|
- Minimum supported Rust version (MSRV) is now 1.72.
|
||||||
|
|
||||||
|
|
|
@ -1,33 +1,48 @@
|
||||||
[package]
|
[package]
|
||||||
name = "actix-multipart"
|
name = "actix-multipart"
|
||||||
version = "0.6.1"
|
version = "0.7.2"
|
||||||
authors = [
|
authors = [
|
||||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||||
"Jacob Halsey <jacob@jhalsey.com>",
|
"Jacob Halsey <jacob@jhalsey.com>",
|
||||||
|
"Rob Ede <robjtede@icloud.com>",
|
||||||
]
|
]
|
||||||
description = "Multipart form support for Actix Web"
|
description = "Multipart request & form support for Actix Web"
|
||||||
keywords = ["http", "web", "framework", "async", "futures"]
|
keywords = ["http", "actix", "web", "multipart", "form"]
|
||||||
homepage = "https://actix.rs"
|
homepage.workspace = true
|
||||||
repository = "https://github.com/actix/actix-web"
|
repository.workspace = true
|
||||||
license = "MIT OR Apache-2.0"
|
license.workspace = true
|
||||||
edition = "2021"
|
edition.workspace = true
|
||||||
|
|
||||||
[package.metadata.docs.rs]
|
[package.metadata.docs.rs]
|
||||||
rustdoc-args = ["--cfg", "docsrs"]
|
rustdoc-args = ["--cfg", "docsrs"]
|
||||||
all-features = true
|
all-features = true
|
||||||
|
|
||||||
|
[package.metadata.cargo_check_external_types]
|
||||||
|
allowed_external_types = [
|
||||||
|
"actix_http::*",
|
||||||
|
"actix_multipart_derive::*",
|
||||||
|
"actix_utils::*",
|
||||||
|
"actix_web::*",
|
||||||
|
"bytes::*",
|
||||||
|
"futures_core::*",
|
||||||
|
"mime::*",
|
||||||
|
"serde_json::*",
|
||||||
|
"serde_plain::*",
|
||||||
|
"serde::*",
|
||||||
|
"tempfile::*",
|
||||||
|
]
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
default = ["tempfile", "derive"]
|
default = ["tempfile", "derive"]
|
||||||
derive = ["actix-multipart-derive"]
|
derive = ["actix-multipart-derive"]
|
||||||
tempfile = ["dep:tempfile", "tokio/fs"]
|
tempfile = ["dep:tempfile", "tokio/fs"]
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
actix-multipart-derive = { version = "=0.6.1", optional = true }
|
actix-multipart-derive = { version = "=0.7.0", optional = true }
|
||||||
actix-utils = "3"
|
actix-utils = "3"
|
||||||
actix-web = { version = "4", default-features = false }
|
actix-web = { version = "4", default-features = false }
|
||||||
|
|
||||||
bytes = "1"
|
derive_more = { version = "1", features = ["display", "error", "from"] }
|
||||||
derive_more = "0.99.5"
|
|
||||||
futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
|
futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
|
||||||
futures-util = { version = "0.3.17", default-features = false, features = ["alloc"] }
|
futures-util = { version = "0.3.17", default-features = false, features = ["alloc"] }
|
||||||
httparse = "1.3"
|
httparse = "1.3"
|
||||||
|
@ -48,8 +63,14 @@ actix-multipart-rfc7578 = "0.10"
|
||||||
actix-rt = "2.2"
|
actix-rt = "2.2"
|
||||||
actix-test = "0.1"
|
actix-test = "0.1"
|
||||||
actix-web = "4"
|
actix-web = "4"
|
||||||
|
assert_matches = "1"
|
||||||
awc = "3"
|
awc = "3"
|
||||||
|
env_logger = "0.11"
|
||||||
futures-util = { version = "0.3.17", default-features = false, features = ["alloc"] }
|
futures-util = { version = "0.3.17", default-features = false, features = ["alloc"] }
|
||||||
|
futures-test = "0.3"
|
||||||
multer = "3"
|
multer = "3"
|
||||||
tokio = { version = "1.24.2", features = ["sync"] }
|
tokio = { version = "1.24.2", features = ["sync"] }
|
||||||
tokio-stream = "0.1"
|
tokio-stream = "0.1"
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
|
@ -1,38 +1,32 @@
|
||||||
# `actix-multipart`
|
# `actix-multipart`
|
||||||
|
|
||||||
> Multipart form support for Actix Web.
|
|
||||||
|
|
||||||
<!-- prettier-ignore-start -->
|
<!-- prettier-ignore-start -->
|
||||||
|
|
||||||
[![crates.io](https://img.shields.io/crates/v/actix-multipart?label=latest)](https://crates.io/crates/actix-multipart)
|
[![crates.io](https://img.shields.io/crates/v/actix-multipart?label=latest)](https://crates.io/crates/actix-multipart)
|
||||||
[![Documentation](https://docs.rs/actix-multipart/badge.svg?version=0.6.1)](https://docs.rs/actix-multipart/0.6.1)
|
[![Documentation](https://docs.rs/actix-multipart/badge.svg?version=0.7.2)](https://docs.rs/actix-multipart/0.7.2)
|
||||||
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
|
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
|
||||||
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-multipart.svg)
|
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-multipart.svg)
|
||||||
<br />
|
<br />
|
||||||
[![dependency status](https://deps.rs/crate/actix-multipart/0.6.1/status.svg)](https://deps.rs/crate/actix-multipart/0.6.1)
|
[![dependency status](https://deps.rs/crate/actix-multipart/0.7.2/status.svg)](https://deps.rs/crate/actix-multipart/0.7.2)
|
||||||
[![Download](https://img.shields.io/crates/d/actix-multipart.svg)](https://crates.io/crates/actix-multipart)
|
[![Download](https://img.shields.io/crates/d/actix-multipart.svg)](https://crates.io/crates/actix-multipart)
|
||||||
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
|
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
|
||||||
|
|
||||||
<!-- prettier-ignore-end -->
|
<!-- prettier-ignore-end -->
|
||||||
|
|
||||||
|
<!-- cargo-rdme start -->
|
||||||
|
|
||||||
## Example
|
Multipart request & form support for Actix Web.
|
||||||
|
|
||||||
Dependencies:
|
The [`Multipart`] extractor aims to support all kinds of `multipart/*` requests, including `multipart/form-data`, `multipart/related` and `multipart/mixed`. This is a lower-level extractor which supports reading [multipart fields](Field), in the order they are sent by the client.
|
||||||
|
|
||||||
```toml
|
Due to additional requirements for `multipart/form-data` requests, the higher level [`MultipartForm`] extractor and derive macro only supports this media type.
|
||||||
[dependencies]
|
|
||||||
actix-multipart = "0.6"
|
|
||||||
actix-web = "4.5"
|
|
||||||
serde = { version = "1.0", features = ["derive"] }
|
|
||||||
```
|
|
||||||
|
|
||||||
Code:
|
## Examples
|
||||||
|
|
||||||
```rust
|
```rust
|
||||||
use actix_web::{post, App, HttpServer, Responder};
|
use actix_web::{post, App, HttpServer, Responder};
|
||||||
|
|
||||||
use actix_multipart::form::{json::Json as MPJson, tempfile::TempFile, MultipartForm};
|
use actix_multipart::form::{json::Json as MpJson, tempfile::TempFile, MultipartForm};
|
||||||
use serde::Deserialize;
|
use serde::Deserialize;
|
||||||
|
|
||||||
#[derive(Debug, Deserialize)]
|
#[derive(Debug, Deserialize)]
|
||||||
|
@ -44,7 +38,7 @@ struct Metadata {
|
||||||
struct UploadForm {
|
struct UploadForm {
|
||||||
#[multipart(limit = "100MB")]
|
#[multipart(limit = "100MB")]
|
||||||
file: TempFile,
|
file: TempFile,
|
||||||
json: MPJson<Metadata>,
|
json: MpJson<Metadata>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[post("/videos")]
|
#[post("/videos")]
|
||||||
|
@ -64,15 +58,17 @@ async fn main() -> std::io::Result<()> {
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
Curl request :
|
cURL request:
|
||||||
```bash
|
|
||||||
|
```sh
|
||||||
curl -v --request POST \
|
curl -v --request POST \
|
||||||
--url http://localhost:8080/videos \
|
--url http://localhost:8080/videos \
|
||||||
-F 'json={"name": "Cargo.lock"};type=application/json' \
|
-F 'json={"name": "Cargo.lock"};type=application/json' \
|
||||||
-F file=@./Cargo.lock
|
-F file=@./Cargo.lock
|
||||||
```
|
```
|
||||||
|
|
||||||
|
[`MultipartForm`]: struct@form::MultipartForm
|
||||||
|
|
||||||
### Examples
|
<!-- cargo-rdme end -->
|
||||||
|
|
||||||
https://github.com/actix/examples/tree/master/forms/multipart
|
[More available in the examples repo →](https://github.com/actix/examples/tree/master/forms/multipart)
|
||||||
|
|
36
actix-multipart/examples/form.rs
Normal file
36
actix-multipart/examples/form.rs
Normal file
|
@ -0,0 +1,36 @@
|
||||||
|
use actix_multipart::form::{json::Json as MpJson, tempfile::TempFile, MultipartForm};
|
||||||
|
use actix_web::{middleware::Logger, post, App, HttpServer, Responder};
|
||||||
|
use serde::Deserialize;
|
||||||
|
|
||||||
|
#[derive(Debug, Deserialize)]
|
||||||
|
struct Metadata {
|
||||||
|
name: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, MultipartForm)]
|
||||||
|
struct UploadForm {
|
||||||
|
#[multipart(limit = "100MB")]
|
||||||
|
file: TempFile,
|
||||||
|
json: MpJson<Metadata>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[post("/videos")]
|
||||||
|
async fn post_video(MultipartForm(form): MultipartForm<UploadForm>) -> impl Responder {
|
||||||
|
format!(
|
||||||
|
"Uploaded file {}, with size: {}\ntemporary file ({}) was deleted\n",
|
||||||
|
form.json.name,
|
||||||
|
form.file.size,
|
||||||
|
form.file.file.path().display(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_web::main]
|
||||||
|
async fn main() -> std::io::Result<()> {
|
||||||
|
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
|
||||||
|
|
||||||
|
HttpServer::new(move || App::new().service(post_video).wrap(Logger::default()))
|
||||||
|
.workers(2)
|
||||||
|
.bind(("127.0.0.1", 8080))?
|
||||||
|
.run()
|
||||||
|
.await
|
||||||
|
}
|
|
@ -5,83 +5,101 @@ use actix_web::{
|
||||||
http::StatusCode,
|
http::StatusCode,
|
||||||
ResponseError,
|
ResponseError,
|
||||||
};
|
};
|
||||||
use derive_more::{Display, Error, From};
|
use derive_more::derive::{Display, Error, From};
|
||||||
|
|
||||||
/// A set of errors that can occur during parsing multipart streams.
|
/// A set of errors that can occur during parsing multipart streams.
|
||||||
#[derive(Debug, Display, From, Error)]
|
#[derive(Debug, Display, From, Error)]
|
||||||
#[non_exhaustive]
|
#[non_exhaustive]
|
||||||
pub enum MultipartError {
|
pub enum Error {
|
||||||
/// Content-Disposition header is not found or is not equal to "form-data".
|
/// Could not find Content-Type header.
|
||||||
|
#[display("Could not find Content-Type header")]
|
||||||
|
ContentTypeMissing,
|
||||||
|
|
||||||
|
/// Could not parse Content-Type header.
|
||||||
|
#[display("Could not parse Content-Type header")]
|
||||||
|
ContentTypeParse,
|
||||||
|
|
||||||
|
/// Parsed Content-Type did not have "multipart" top-level media type.
|
||||||
///
|
///
|
||||||
/// According to [RFC 7578 §4.2](https://datatracker.ietf.org/doc/html/rfc7578#section-4.2) a
|
/// Also raised when extracting a [`MultipartForm`] from a request that does not have the
|
||||||
/// Content-Disposition header must always be present and equal to "form-data".
|
/// "multipart/form-data" media type.
|
||||||
#[display(fmt = "No Content-Disposition `form-data` header")]
|
///
|
||||||
NoContentDisposition,
|
/// [`MultipartForm`]: struct@crate::form::MultipartForm
|
||||||
|
#[display("Parsed Content-Type did not have 'multipart' top-level media type")]
|
||||||
|
ContentTypeIncompatible,
|
||||||
|
|
||||||
/// Content-Type header is not found
|
/// Multipart boundary is not found.
|
||||||
#[display(fmt = "No Content-Type header found")]
|
#[display("Multipart boundary is not found")]
|
||||||
NoContentType,
|
BoundaryMissing,
|
||||||
|
|
||||||
/// Can not parse Content-Type header
|
/// Content-Disposition header was not found or not of disposition type "form-data" when parsing
|
||||||
#[display(fmt = "Can not parse Content-Type header")]
|
/// a "form-data" field.
|
||||||
ParseContentType,
|
///
|
||||||
|
/// As per [RFC 7578 §4.2], a "multipart/form-data" field's Content-Disposition header must
|
||||||
|
/// always be present and have a disposition type of "form-data".
|
||||||
|
///
|
||||||
|
/// [RFC 7578 §4.2]: https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
|
||||||
|
#[display("Content-Disposition header was not found when parsing a \"form-data\" field")]
|
||||||
|
ContentDispositionMissing,
|
||||||
|
|
||||||
/// Multipart boundary is not found
|
/// Content-Disposition name parameter was not found when parsing a "form-data" field.
|
||||||
#[display(fmt = "Multipart boundary is not found")]
|
///
|
||||||
Boundary,
|
/// As per [RFC 7578 §4.2], a "multipart/form-data" field's Content-Disposition header must
|
||||||
|
/// always include a "name" parameter.
|
||||||
|
///
|
||||||
|
/// [RFC 7578 §4.2]: https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
|
||||||
|
#[display("Content-Disposition header was not found when parsing a \"form-data\" field")]
|
||||||
|
ContentDispositionNameMissing,
|
||||||
|
|
||||||
/// Nested multipart is not supported
|
/// Nested multipart is not supported.
|
||||||
#[display(fmt = "Nested multipart is not supported")]
|
#[display("Nested multipart is not supported")]
|
||||||
Nested,
|
Nested,
|
||||||
|
|
||||||
/// Multipart stream is incomplete
|
/// Multipart stream is incomplete.
|
||||||
#[display(fmt = "Multipart stream is incomplete")]
|
#[display("Multipart stream is incomplete")]
|
||||||
Incomplete,
|
Incomplete,
|
||||||
|
|
||||||
/// Error during field parsing
|
/// Field parsing failed.
|
||||||
#[display(fmt = "{}", _0)]
|
#[display("Error during field parsing")]
|
||||||
Parse(ParseError),
|
Parse(ParseError),
|
||||||
|
|
||||||
/// Payload error
|
/// HTTP payload error.
|
||||||
#[display(fmt = "{}", _0)]
|
#[display("Payload error")]
|
||||||
Payload(PayloadError),
|
Payload(PayloadError),
|
||||||
|
|
||||||
/// Not consumed
|
/// Stream is not consumed.
|
||||||
#[display(fmt = "Multipart stream is not consumed")]
|
#[display("Stream is not consumed")]
|
||||||
NotConsumed,
|
NotConsumed,
|
||||||
|
|
||||||
/// An error from a field handler in a form
|
/// Form field handler raised error.
|
||||||
#[display(
|
#[display("An error occurred processing field: {name}")]
|
||||||
fmt = "An error occurred processing field `{}`: {}",
|
|
||||||
field_name,
|
|
||||||
source
|
|
||||||
)]
|
|
||||||
Field {
|
Field {
|
||||||
field_name: String,
|
name: String,
|
||||||
source: actix_web::Error,
|
source: actix_web::Error,
|
||||||
},
|
},
|
||||||
|
|
||||||
/// Duplicate field
|
/// Duplicate field found (for structure that opted-in to denying duplicate fields).
|
||||||
#[display(fmt = "Duplicate field found for: `{}`", _0)]
|
#[display("Duplicate field found: {_0}")]
|
||||||
#[from(ignore)]
|
#[from(ignore)]
|
||||||
DuplicateField(#[error(not(source))] String),
|
DuplicateField(#[error(not(source))] String),
|
||||||
|
|
||||||
/// Missing field
|
/// Required field is missing.
|
||||||
#[display(fmt = "Field with name `{}` is required", _0)]
|
#[display("Required field is missing: {_0}")]
|
||||||
#[from(ignore)]
|
#[from(ignore)]
|
||||||
MissingField(#[error(not(source))] String),
|
MissingField(#[error(not(source))] String),
|
||||||
|
|
||||||
/// Unknown field
|
/// Unknown field (for structure that opted-in to denying unknown fields).
|
||||||
#[display(fmt = "Unsupported field `{}`", _0)]
|
#[display("Unknown field: {_0}")]
|
||||||
#[from(ignore)]
|
#[from(ignore)]
|
||||||
UnsupportedField(#[error(not(source))] String),
|
UnknownField(#[error(not(source))] String),
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Return `BadRequest` for `MultipartError`
|
/// Return `BadRequest` for `MultipartError`.
|
||||||
impl ResponseError for MultipartError {
|
impl ResponseError for Error {
|
||||||
fn status_code(&self) -> StatusCode {
|
fn status_code(&self) -> StatusCode {
|
||||||
match &self {
|
match &self {
|
||||||
MultipartError::Field { source, .. } => source.as_response_error().status_code(),
|
Error::Field { source, .. } => source.as_response_error().status_code(),
|
||||||
|
Error::ContentTypeIncompatible => StatusCode::UNSUPPORTED_MEDIA_TYPE,
|
||||||
_ => StatusCode::BAD_REQUEST,
|
_ => StatusCode::BAD_REQUEST,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -93,7 +111,7 @@ mod tests {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_multipart_error() {
|
fn test_multipart_error() {
|
||||||
let resp = MultipartError::Boundary.error_response();
|
let resp = Error::BoundaryMissing.error_response();
|
||||||
assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
|
assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,21 +1,20 @@
|
||||||
//! Multipart payload support
|
|
||||||
|
|
||||||
use actix_utils::future::{ready, Ready};
|
use actix_utils::future::{ready, Ready};
|
||||||
use actix_web::{dev::Payload, Error, FromRequest, HttpRequest};
|
use actix_web::{dev::Payload, Error, FromRequest, HttpRequest};
|
||||||
|
|
||||||
use crate::server::Multipart;
|
use crate::multipart::Multipart;
|
||||||
|
|
||||||
/// Get request's payload as multipart stream.
|
/// Extract request's payload as multipart stream.
|
||||||
///
|
///
|
||||||
/// Content-type: multipart/form-data;
|
/// Content-type: multipart/*;
|
||||||
///
|
///
|
||||||
/// # Examples
|
/// # Examples
|
||||||
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// use actix_web::{web, HttpResponse, Error};
|
/// use actix_web::{web, HttpResponse};
|
||||||
/// use actix_multipart::Multipart;
|
/// use actix_multipart::Multipart;
|
||||||
/// use futures_util::StreamExt as _;
|
/// use futures_util::StreamExt as _;
|
||||||
///
|
///
|
||||||
/// async fn index(mut payload: Multipart) -> Result<HttpResponse, Error> {
|
/// async fn index(mut payload: Multipart) -> actix_web::Result<HttpResponse> {
|
||||||
/// // iterate over multipart stream
|
/// // iterate over multipart stream
|
||||||
/// while let Some(item) = payload.next().await {
|
/// while let Some(item) = payload.next().await {
|
||||||
/// let mut field = item?;
|
/// let mut field = item?;
|
||||||
|
@ -26,7 +25,7 @@ use crate::server::Multipart;
|
||||||
/// }
|
/// }
|
||||||
/// }
|
/// }
|
||||||
///
|
///
|
||||||
/// Ok(HttpResponse::Ok().into())
|
/// Ok(HttpResponse::Ok().finish())
|
||||||
/// }
|
/// }
|
||||||
/// ```
|
/// ```
|
||||||
impl FromRequest for Multipart {
|
impl FromRequest for Multipart {
|
||||||
|
@ -35,9 +34,6 @@ impl FromRequest for Multipart {
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
|
fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
|
||||||
ready(Ok(match Multipart::boundary(req.headers()) {
|
ready(Ok(Multipart::from_req(req, payload)))
|
||||||
Ok(boundary) => Multipart::from_boundary(boundary, payload.take()),
|
|
||||||
Err(err) => Multipart::from_error(err),
|
|
||||||
}))
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
501
actix-multipart/src/field.rs
Normal file
501
actix-multipart/src/field.rs
Normal file
|
@ -0,0 +1,501 @@
|
||||||
|
use std::{
|
||||||
|
cell::RefCell,
|
||||||
|
cmp, fmt,
|
||||||
|
future::poll_fn,
|
||||||
|
mem,
|
||||||
|
pin::Pin,
|
||||||
|
rc::Rc,
|
||||||
|
task::{ready, Context, Poll},
|
||||||
|
};
|
||||||
|
|
||||||
|
use actix_web::{
|
||||||
|
error::PayloadError,
|
||||||
|
http::header::{self, ContentDisposition, HeaderMap},
|
||||||
|
web::{Bytes, BytesMut},
|
||||||
|
};
|
||||||
|
use derive_more::derive::{Display, Error};
|
||||||
|
use futures_core::Stream;
|
||||||
|
use mime::Mime;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
error::Error,
|
||||||
|
payload::{PayloadBuffer, PayloadRef},
|
||||||
|
safety::Safety,
|
||||||
|
};
|
||||||
|
|
||||||
|
/// Error type returned from [`Field::bytes()`] when field data is larger than limit.
|
||||||
|
#[derive(Debug, Display, Error)]
|
||||||
|
#[display("size limit exceeded while collecting field data")]
|
||||||
|
#[non_exhaustive]
|
||||||
|
pub struct LimitExceeded;
|
||||||
|
|
||||||
|
/// A single field in a multipart stream.
|
||||||
|
pub struct Field {
|
||||||
|
/// Field's Content-Type.
|
||||||
|
content_type: Option<Mime>,
|
||||||
|
|
||||||
|
/// Field's Content-Disposition.
|
||||||
|
content_disposition: Option<ContentDisposition>,
|
||||||
|
|
||||||
|
/// Form field name.
|
||||||
|
///
|
||||||
|
/// A non-optional storage for form field names to avoid unwraps in `form` module. Will be an
|
||||||
|
/// empty string in non-form contexts.
|
||||||
|
///
|
||||||
|
// INVARIANT: always non-empty when request content-type is multipart/form-data.
|
||||||
|
pub(crate) form_field_name: String,
|
||||||
|
|
||||||
|
/// Field's header map.
|
||||||
|
headers: HeaderMap,
|
||||||
|
|
||||||
|
safety: Safety,
|
||||||
|
inner: Rc<RefCell<InnerField>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Field {
|
||||||
|
pub(crate) fn new(
|
||||||
|
content_type: Option<Mime>,
|
||||||
|
content_disposition: Option<ContentDisposition>,
|
||||||
|
form_field_name: Option<String>,
|
||||||
|
headers: HeaderMap,
|
||||||
|
safety: Safety,
|
||||||
|
inner: Rc<RefCell<InnerField>>,
|
||||||
|
) -> Self {
|
||||||
|
Field {
|
||||||
|
content_type,
|
||||||
|
content_disposition,
|
||||||
|
form_field_name: form_field_name.unwrap_or_default(),
|
||||||
|
headers,
|
||||||
|
inner,
|
||||||
|
safety,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a reference to the field's header map.
|
||||||
|
pub fn headers(&self) -> &HeaderMap {
|
||||||
|
&self.headers
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a reference to the field's content (mime) type, if it is supplied by the client.
|
||||||
|
///
|
||||||
|
/// According to [RFC 7578](https://www.rfc-editor.org/rfc/rfc7578#section-4.4), if it is not
|
||||||
|
/// present, it should default to "text/plain". Note it is the responsibility of the client to
|
||||||
|
/// provide the appropriate content type, there is no attempt to validate this by the server.
|
||||||
|
pub fn content_type(&self) -> Option<&Mime> {
|
||||||
|
self.content_type.as_ref()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns this field's parsed Content-Disposition header, if set.
|
||||||
|
///
|
||||||
|
/// # Validation
|
||||||
|
///
|
||||||
|
/// Per [RFC 7578 §4.2], the parts of a multipart/form-data payload MUST contain a
|
||||||
|
/// Content-Disposition header field where the disposition type is `form-data` and MUST also
|
||||||
|
/// contain an additional parameter of `name` with its value being the original field name from
|
||||||
|
/// the form. This requirement is enforced during extraction for multipart/form-data requests,
|
||||||
|
/// but not other kinds of multipart requests (such as multipart/related).
|
||||||
|
///
|
||||||
|
/// As such, it is safe to `.unwrap()` calls `.content_disposition()` if you've verified.
|
||||||
|
///
|
||||||
|
/// The [`name()`](Self::name) method is also provided as a convenience for obtaining the
|
||||||
|
/// aforementioned name parameter.
|
||||||
|
///
|
||||||
|
/// [RFC 7578 §4.2]: https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
|
||||||
|
pub fn content_disposition(&self) -> Option<&ContentDisposition> {
|
||||||
|
self.content_disposition.as_ref()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the field's name, if set.
|
||||||
|
///
|
||||||
|
/// See [`content_disposition()`](Self::content_disposition) regarding guarantees on presence of
|
||||||
|
/// the "name" field.
|
||||||
|
pub fn name(&self) -> Option<&str> {
|
||||||
|
self.content_disposition()?.get_name()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Collects the raw field data, up to `limit` bytes.
|
||||||
|
///
|
||||||
|
/// # Errors
|
||||||
|
///
|
||||||
|
/// Any errors produced by the data stream are returned as `Ok(Err(Error))` immediately.
|
||||||
|
///
|
||||||
|
/// If the buffered data size would exceed `limit`, an `Err(LimitExceeded)` is returned. Note
|
||||||
|
/// that, in this case, the full data stream is exhausted before returning the error so that
|
||||||
|
/// subsequent fields can still be read. To better defend against malicious/infinite requests,
|
||||||
|
/// it is advisable to also put a timeout on this call.
|
||||||
|
pub async fn bytes(&mut self, limit: usize) -> Result<Result<Bytes, Error>, LimitExceeded> {
|
||||||
|
/// Sensible default (2kB) for initial, bounded allocation when collecting body bytes.
|
||||||
|
const INITIAL_ALLOC_BYTES: usize = 2 * 1024;
|
||||||
|
|
||||||
|
let mut exceeded_limit = false;
|
||||||
|
let mut buf = BytesMut::with_capacity(INITIAL_ALLOC_BYTES);
|
||||||
|
|
||||||
|
let mut field = Pin::new(self);
|
||||||
|
|
||||||
|
match poll_fn(|cx| loop {
|
||||||
|
match ready!(field.as_mut().poll_next(cx)) {
|
||||||
|
// if already over limit, discard chunk to advance multipart request
|
||||||
|
Some(Ok(_chunk)) if exceeded_limit => {}
|
||||||
|
|
||||||
|
// if limit is exceeded set flag to true and continue
|
||||||
|
Some(Ok(chunk)) if buf.len() + chunk.len() > limit => {
|
||||||
|
exceeded_limit = true;
|
||||||
|
// eagerly de-allocate field data buffer
|
||||||
|
let _ = mem::take(&mut buf);
|
||||||
|
}
|
||||||
|
|
||||||
|
Some(Ok(chunk)) => buf.extend_from_slice(&chunk),
|
||||||
|
|
||||||
|
None => return Poll::Ready(Ok(())),
|
||||||
|
Some(Err(err)) => return Poll::Ready(Err(err)),
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
// propagate error returned from body poll
|
||||||
|
Err(err) => Ok(Err(err)),
|
||||||
|
|
||||||
|
// limit was exceeded while reading body
|
||||||
|
Ok(()) if exceeded_limit => Err(LimitExceeded),
|
||||||
|
|
||||||
|
// otherwise return body buffer
|
||||||
|
Ok(()) => Ok(Ok(buf.freeze())),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Stream for Field {
|
||||||
|
type Item = Result<Bytes, Error>;
|
||||||
|
|
||||||
|
fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
|
||||||
|
let this = self.get_mut();
|
||||||
|
let mut inner = this.inner.borrow_mut();
|
||||||
|
|
||||||
|
if let Some(mut buffer) = inner
|
||||||
|
.payload
|
||||||
|
.as_ref()
|
||||||
|
.expect("Field should not be polled after completion")
|
||||||
|
.get_mut(&this.safety)
|
||||||
|
{
|
||||||
|
// check safety and poll read payload to buffer.
|
||||||
|
buffer.poll_stream(cx)?;
|
||||||
|
} else if !this.safety.is_clean() {
|
||||||
|
// safety violation
|
||||||
|
return Poll::Ready(Some(Err(Error::NotConsumed)));
|
||||||
|
} else {
|
||||||
|
return Poll::Pending;
|
||||||
|
}
|
||||||
|
|
||||||
|
inner.poll(&this.safety)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Debug for Field {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
if let Some(ct) = &self.content_type {
|
||||||
|
writeln!(f, "\nField: {}", ct)?;
|
||||||
|
} else {
|
||||||
|
writeln!(f, "\nField:")?;
|
||||||
|
}
|
||||||
|
writeln!(f, " boundary: {}", self.inner.borrow().boundary)?;
|
||||||
|
writeln!(f, " headers:")?;
|
||||||
|
for (key, val) in self.headers.iter() {
|
||||||
|
writeln!(f, " {:?}: {:?}", key, val)?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) struct InnerField {
|
||||||
|
/// Payload is initialized as Some and is `take`n when the field stream finishes.
|
||||||
|
payload: Option<PayloadRef>,
|
||||||
|
|
||||||
|
/// Field boundary (without "--" prefix).
|
||||||
|
boundary: String,
|
||||||
|
|
||||||
|
/// True if request payload has been exhausted.
|
||||||
|
eof: bool,
|
||||||
|
|
||||||
|
/// Field data's stated size according to it's Content-Length header.
|
||||||
|
length: Option<u64>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl InnerField {
|
||||||
|
pub(crate) fn new_in_rc(
|
||||||
|
payload: PayloadRef,
|
||||||
|
boundary: String,
|
||||||
|
headers: &HeaderMap,
|
||||||
|
) -> Result<Rc<RefCell<InnerField>>, PayloadError> {
|
||||||
|
Self::new(payload, boundary, headers).map(|this| Rc::new(RefCell::new(this)))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn new(
|
||||||
|
payload: PayloadRef,
|
||||||
|
boundary: String,
|
||||||
|
headers: &HeaderMap,
|
||||||
|
) -> Result<InnerField, PayloadError> {
|
||||||
|
let len = if let Some(len) = headers.get(&header::CONTENT_LENGTH) {
|
||||||
|
match len.to_str().ok().and_then(|len| len.parse::<u64>().ok()) {
|
||||||
|
Some(len) => Some(len),
|
||||||
|
None => return Err(PayloadError::Incomplete(None)),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(InnerField {
|
||||||
|
boundary,
|
||||||
|
payload: Some(payload),
|
||||||
|
eof: false,
|
||||||
|
length: len,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Reads body part content chunk of the specified size.
|
||||||
|
///
|
||||||
|
/// The body part must has `Content-Length` header with proper value.
|
||||||
|
pub(crate) fn read_len(
|
||||||
|
payload: &mut PayloadBuffer,
|
||||||
|
size: &mut u64,
|
||||||
|
) -> Poll<Option<Result<Bytes, Error>>> {
|
||||||
|
if *size == 0 {
|
||||||
|
Poll::Ready(None)
|
||||||
|
} else {
|
||||||
|
match payload.read_max(*size)? {
|
||||||
|
Some(mut chunk) => {
|
||||||
|
let len = cmp::min(chunk.len() as u64, *size);
|
||||||
|
*size -= len;
|
||||||
|
let ch = chunk.split_to(len as usize);
|
||||||
|
if !chunk.is_empty() {
|
||||||
|
payload.unprocessed(chunk);
|
||||||
|
}
|
||||||
|
Poll::Ready(Some(Ok(ch)))
|
||||||
|
}
|
||||||
|
None => {
|
||||||
|
if payload.eof && (*size != 0) {
|
||||||
|
Poll::Ready(Some(Err(Error::Incomplete)))
|
||||||
|
} else {
|
||||||
|
Poll::Pending
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Reads content chunk of body part with unknown length.
|
||||||
|
///
|
||||||
|
/// The `Content-Length` header for body part is not necessary.
|
||||||
|
pub(crate) fn read_stream(
|
||||||
|
payload: &mut PayloadBuffer,
|
||||||
|
boundary: &str,
|
||||||
|
) -> Poll<Option<Result<Bytes, Error>>> {
|
||||||
|
let mut pos = 0;
|
||||||
|
|
||||||
|
let len = payload.buf.len();
|
||||||
|
|
||||||
|
if len == 0 {
|
||||||
|
return if payload.eof {
|
||||||
|
Poll::Ready(Some(Err(Error::Incomplete)))
|
||||||
|
} else {
|
||||||
|
Poll::Pending
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// check boundary
|
||||||
|
if len > 4 && payload.buf[0] == b'\r' {
|
||||||
|
let b_len = if payload.buf.starts_with(b"\r\n") && &payload.buf[2..4] == b"--" {
|
||||||
|
Some(4)
|
||||||
|
} else if &payload.buf[1..3] == b"--" {
|
||||||
|
Some(3)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Some(b_len) = b_len {
|
||||||
|
let b_size = boundary.len() + b_len;
|
||||||
|
if len < b_size {
|
||||||
|
return Poll::Pending;
|
||||||
|
} else if &payload.buf[b_len..b_size] == boundary.as_bytes() {
|
||||||
|
// found boundary
|
||||||
|
return Poll::Ready(None);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
loop {
|
||||||
|
return if let Some(idx) = memchr::memmem::find(&payload.buf[pos..], b"\r") {
|
||||||
|
let cur = pos + idx;
|
||||||
|
|
||||||
|
// check if we have enough data for boundary detection
|
||||||
|
if cur + 4 > len {
|
||||||
|
if cur > 0 {
|
||||||
|
Poll::Ready(Some(Ok(payload.buf.split_to(cur).freeze())))
|
||||||
|
} else {
|
||||||
|
Poll::Pending
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// check boundary
|
||||||
|
if (&payload.buf[cur..cur + 2] == b"\r\n"
|
||||||
|
&& &payload.buf[cur + 2..cur + 4] == b"--")
|
||||||
|
|| (&payload.buf[cur..=cur] == b"\r"
|
||||||
|
&& &payload.buf[cur + 1..cur + 3] == b"--")
|
||||||
|
{
|
||||||
|
if cur != 0 {
|
||||||
|
// return buffer
|
||||||
|
Poll::Ready(Some(Ok(payload.buf.split_to(cur).freeze())))
|
||||||
|
} else {
|
||||||
|
pos = cur + 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// not boundary
|
||||||
|
pos = cur + 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Poll::Ready(Some(Ok(payload.buf.split().freeze())))
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn poll(&mut self, safety: &Safety) -> Poll<Option<Result<Bytes, Error>>> {
|
||||||
|
if self.payload.is_none() {
|
||||||
|
return Poll::Ready(None);
|
||||||
|
}
|
||||||
|
|
||||||
|
let Some(mut payload) = self
|
||||||
|
.payload
|
||||||
|
.as_ref()
|
||||||
|
.expect("Field should not be polled after completion")
|
||||||
|
.get_mut(safety)
|
||||||
|
else {
|
||||||
|
return Poll::Pending;
|
||||||
|
};
|
||||||
|
|
||||||
|
if !self.eof {
|
||||||
|
let res = if let Some(ref mut len) = self.length {
|
||||||
|
Self::read_len(&mut payload, len)
|
||||||
|
} else {
|
||||||
|
Self::read_stream(&mut payload, &self.boundary)
|
||||||
|
};
|
||||||
|
|
||||||
|
match ready!(res) {
|
||||||
|
Some(Ok(bytes)) => return Poll::Ready(Some(Ok(bytes))),
|
||||||
|
Some(Err(err)) => return Poll::Ready(Some(Err(err))),
|
||||||
|
None => self.eof = true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let result = match payload.readline() {
|
||||||
|
Ok(None) => Poll::Pending,
|
||||||
|
Ok(Some(line)) => {
|
||||||
|
if line.as_ref() != b"\r\n" {
|
||||||
|
log::warn!("multipart field did not read all the data or it is malformed");
|
||||||
|
}
|
||||||
|
Poll::Ready(None)
|
||||||
|
}
|
||||||
|
Err(err) => Poll::Ready(Some(Err(err))),
|
||||||
|
};
|
||||||
|
|
||||||
|
drop(payload);
|
||||||
|
|
||||||
|
if let Poll::Ready(None) = result {
|
||||||
|
// drop payload buffer and make future un-poll-able
|
||||||
|
let _ = self.payload.take();
|
||||||
|
}
|
||||||
|
|
||||||
|
result
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use futures_util::{stream, StreamExt as _};
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
use crate::Multipart;
|
||||||
|
|
||||||
|
// TODO: use test utility when multi-file support is introduced
|
||||||
|
fn create_double_request_with_header() -> (Bytes, HeaderMap) {
|
||||||
|
let bytes = Bytes::from(
|
||||||
|
"testasdadsad\r\n\
|
||||||
|
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
|
||||||
|
Content-Disposition: form-data; name=\"file\"; filename=\"fn.txt\"\r\n\
|
||||||
|
Content-Type: text/plain; charset=utf-8\r\n\
|
||||||
|
\r\n\
|
||||||
|
one+one+one\r\n\
|
||||||
|
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
|
||||||
|
Content-Disposition: form-data; name=\"file\"; filename=\"fn.txt\"\r\n\
|
||||||
|
Content-Type: text/plain; charset=utf-8\r\n\
|
||||||
|
\r\n\
|
||||||
|
two+two+two\r\n\
|
||||||
|
--abbc761f78ff4d7cb7573b5a23f96ef0--\r\n",
|
||||||
|
);
|
||||||
|
let mut headers = HeaderMap::new();
|
||||||
|
headers.insert(
|
||||||
|
header::CONTENT_TYPE,
|
||||||
|
header::HeaderValue::from_static(
|
||||||
|
"multipart/mixed; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
|
||||||
|
),
|
||||||
|
);
|
||||||
|
(bytes, headers)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn bytes_unlimited() {
|
||||||
|
let (body, headers) = create_double_request_with_header();
|
||||||
|
|
||||||
|
let mut multipart = Multipart::new(&headers, stream::iter([Ok(body)]));
|
||||||
|
|
||||||
|
let field = multipart
|
||||||
|
.next()
|
||||||
|
.await
|
||||||
|
.expect("multipart should have two fields")
|
||||||
|
.expect("multipart body should be well formatted")
|
||||||
|
.bytes(usize::MAX)
|
||||||
|
.await
|
||||||
|
.expect("field data should not be size limited")
|
||||||
|
.expect("reading field data should not error");
|
||||||
|
assert_eq!(field, "one+one+one");
|
||||||
|
|
||||||
|
let field = multipart
|
||||||
|
.next()
|
||||||
|
.await
|
||||||
|
.expect("multipart should have two fields")
|
||||||
|
.expect("multipart body should be well formatted")
|
||||||
|
.bytes(usize::MAX)
|
||||||
|
.await
|
||||||
|
.expect("field data should not be size limited")
|
||||||
|
.expect("reading field data should not error");
|
||||||
|
assert_eq!(field, "two+two+two");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn bytes_limited() {
|
||||||
|
let (body, headers) = create_double_request_with_header();
|
||||||
|
|
||||||
|
let mut multipart = Multipart::new(&headers, stream::iter([Ok(body)]));
|
||||||
|
|
||||||
|
multipart
|
||||||
|
.next()
|
||||||
|
.await
|
||||||
|
.expect("multipart should have two fields")
|
||||||
|
.expect("multipart body should be well formatted")
|
||||||
|
.bytes(8) // smaller than data size
|
||||||
|
.await
|
||||||
|
.expect_err("field data should be size limited");
|
||||||
|
|
||||||
|
// next field still readable
|
||||||
|
let field = multipart
|
||||||
|
.next()
|
||||||
|
.await
|
||||||
|
.expect("multipart should have two fields")
|
||||||
|
.expect("multipart body should be well formatted")
|
||||||
|
.bytes(usize::MAX)
|
||||||
|
.await
|
||||||
|
.expect("field data should not be size limited")
|
||||||
|
.expect("reading field data should not error");
|
||||||
|
assert_eq!(field, "two+two+two");
|
||||||
|
}
|
||||||
|
}
|
|
@ -1,7 +1,6 @@
|
||||||
//! Reads a field into memory.
|
//! Reads a field into memory.
|
||||||
|
|
||||||
use actix_web::HttpRequest;
|
use actix_web::{web::BytesMut, HttpRequest};
|
||||||
use bytes::BytesMut;
|
|
||||||
use futures_core::future::LocalBoxFuture;
|
use futures_core::future::LocalBoxFuture;
|
||||||
use futures_util::TryStreamExt as _;
|
use futures_util::TryStreamExt as _;
|
||||||
use mime::Mime;
|
use mime::Mime;
|
||||||
|
@ -15,7 +14,7 @@ use crate::{
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct Bytes {
|
pub struct Bytes {
|
||||||
/// The data.
|
/// The data.
|
||||||
pub data: bytes::Bytes,
|
pub data: actix_web::web::Bytes,
|
||||||
|
|
||||||
/// The value of the `Content-Type` header.
|
/// The value of the `Content-Type` header.
|
||||||
pub content_type: Option<Mime>,
|
pub content_type: Option<Mime>,
|
||||||
|
@ -41,8 +40,9 @@ impl<'t> FieldReader<'t> for Bytes {
|
||||||
content_type: field.content_type().map(ToOwned::to_owned),
|
content_type: field.content_type().map(ToOwned::to_owned),
|
||||||
file_name: field
|
file_name: field
|
||||||
.content_disposition()
|
.content_disposition()
|
||||||
|
.expect("multipart form fields should have a content-disposition header")
|
||||||
.get_filename()
|
.get_filename()
|
||||||
.map(str::to_owned),
|
.map(ToOwned::to_owned),
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use actix_web::{http::StatusCode, web, Error, HttpRequest, ResponseError};
|
use actix_web::{http::StatusCode, web, Error, HttpRequest, ResponseError};
|
||||||
use derive_more::{Deref, DerefMut, Display, Error};
|
use derive_more::derive::{Deref, DerefMut, Display, Error};
|
||||||
use futures_core::future::LocalBoxFuture;
|
use futures_core::future::LocalBoxFuture;
|
||||||
use serde::de::DeserializeOwned;
|
use serde::de::DeserializeOwned;
|
||||||
|
|
||||||
|
@ -32,7 +32,6 @@ where
|
||||||
fn read_field(req: &'t HttpRequest, field: Field, limits: &'t mut Limits) -> Self::Future {
|
fn read_field(req: &'t HttpRequest, field: Field, limits: &'t mut Limits) -> Self::Future {
|
||||||
Box::pin(async move {
|
Box::pin(async move {
|
||||||
let config = JsonConfig::from_req(req);
|
let config = JsonConfig::from_req(req);
|
||||||
let field_name = field.name().to_owned();
|
|
||||||
|
|
||||||
if config.validate_content_type {
|
if config.validate_content_type {
|
||||||
let valid = if let Some(mime) = field.content_type() {
|
let valid = if let Some(mime) = field.content_type() {
|
||||||
|
@ -43,17 +42,19 @@ where
|
||||||
|
|
||||||
if !valid {
|
if !valid {
|
||||||
return Err(MultipartError::Field {
|
return Err(MultipartError::Field {
|
||||||
field_name,
|
name: field.form_field_name,
|
||||||
source: config.map_error(req, JsonFieldError::ContentType),
|
source: config.map_error(req, JsonFieldError::ContentType),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let form_field_name = field.form_field_name.clone();
|
||||||
|
|
||||||
let bytes = Bytes::read_field(req, field, limits).await?;
|
let bytes = Bytes::read_field(req, field, limits).await?;
|
||||||
|
|
||||||
Ok(Json(serde_json::from_slice(bytes.data.as_ref()).map_err(
|
Ok(Json(serde_json::from_slice(bytes.data.as_ref()).map_err(
|
||||||
|err| MultipartError::Field {
|
|err| MultipartError::Field {
|
||||||
field_name,
|
name: form_field_name,
|
||||||
source: config.map_error(req, JsonFieldError::Deserialize(err)),
|
source: config.map_error(req, JsonFieldError::Deserialize(err)),
|
||||||
},
|
},
|
||||||
)?))
|
)?))
|
||||||
|
@ -65,11 +66,11 @@ where
|
||||||
#[non_exhaustive]
|
#[non_exhaustive]
|
||||||
pub enum JsonFieldError {
|
pub enum JsonFieldError {
|
||||||
/// Deserialize error.
|
/// Deserialize error.
|
||||||
#[display(fmt = "Json deserialize error: {}", _0)]
|
#[display("Json deserialize error: {}", _0)]
|
||||||
Deserialize(serde_json::Error),
|
Deserialize(serde_json::Error),
|
||||||
|
|
||||||
/// Content type error.
|
/// Content type error.
|
||||||
#[display(fmt = "Content type error")]
|
#[display("Content type error")]
|
||||||
ContentType,
|
ContentType,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -133,8 +134,7 @@ impl Default for JsonConfig {
|
||||||
mod tests {
|
mod tests {
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
|
|
||||||
use actix_web::{http::StatusCode, web, App, HttpResponse, Responder};
|
use actix_web::{http::StatusCode, web, web::Bytes, App, HttpResponse, Responder};
|
||||||
use bytes::Bytes;
|
|
||||||
|
|
||||||
use crate::form::{
|
use crate::form::{
|
||||||
json::{Json, JsonConfig},
|
json::{Json, JsonConfig},
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
//! Process and extract typed data from a multipart stream.
|
//! Extract and process typed data from fields of a `multipart/form-data` request.
|
||||||
|
|
||||||
use std::{
|
use std::{
|
||||||
any::Any,
|
any::Any,
|
||||||
|
@ -8,7 +8,7 @@ use std::{
|
||||||
};
|
};
|
||||||
|
|
||||||
use actix_web::{dev, error::PayloadError, web, Error, FromRequest, HttpRequest};
|
use actix_web::{dev, error::PayloadError, web, Error, FromRequest, HttpRequest};
|
||||||
use derive_more::{Deref, DerefMut};
|
use derive_more::derive::{Deref, DerefMut};
|
||||||
use futures_core::future::LocalBoxFuture;
|
use futures_core::future::LocalBoxFuture;
|
||||||
use futures_util::{TryFutureExt as _, TryStreamExt as _};
|
use futures_util::{TryFutureExt as _, TryStreamExt as _};
|
||||||
|
|
||||||
|
@ -33,6 +33,14 @@ pub trait FieldReader<'t>: Sized + Any {
|
||||||
type Future: Future<Output = Result<Self, MultipartError>>;
|
type Future: Future<Output = Result<Self, MultipartError>>;
|
||||||
|
|
||||||
/// The form will call this function to handle the field.
|
/// The form will call this function to handle the field.
|
||||||
|
///
|
||||||
|
/// # Panics
|
||||||
|
///
|
||||||
|
/// When reading the `field` payload using its `Stream` implementation, polling (manually or via
|
||||||
|
/// `next()`/`try_next()`) may panic after the payload is exhausted. If this is a problem for
|
||||||
|
/// your implementation of this method, you should [`fuse()`] the `Field` first.
|
||||||
|
///
|
||||||
|
/// [`fuse()`]: futures_util::stream::StreamExt::fuse()
|
||||||
fn read_field(req: &'t HttpRequest, field: Field, limits: &'t mut Limits) -> Self::Future;
|
fn read_field(req: &'t HttpRequest, field: Field, limits: &'t mut Limits) -> Self::Future;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -72,13 +80,13 @@ where
|
||||||
state: &'t mut State,
|
state: &'t mut State,
|
||||||
duplicate_field: DuplicateField,
|
duplicate_field: DuplicateField,
|
||||||
) -> Self::Future {
|
) -> Self::Future {
|
||||||
if state.contains_key(field.name()) {
|
if state.contains_key(&field.form_field_name) {
|
||||||
match duplicate_field {
|
match duplicate_field {
|
||||||
DuplicateField::Ignore => return Box::pin(ready(Ok(()))),
|
DuplicateField::Ignore => return Box::pin(ready(Ok(()))),
|
||||||
|
|
||||||
DuplicateField::Deny => {
|
DuplicateField::Deny => {
|
||||||
return Box::pin(ready(Err(MultipartError::DuplicateField(
|
return Box::pin(ready(Err(MultipartError::DuplicateField(
|
||||||
field.name().to_owned(),
|
field.form_field_name,
|
||||||
))))
|
))))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -87,7 +95,7 @@ where
|
||||||
}
|
}
|
||||||
|
|
||||||
Box::pin(async move {
|
Box::pin(async move {
|
||||||
let field_name = field.name().to_owned();
|
let field_name = field.form_field_name.clone();
|
||||||
let t = T::read_field(req, field, limits).await?;
|
let t = T::read_field(req, field, limits).await?;
|
||||||
state.insert(field_name, Box::new(t));
|
state.insert(field_name, Box::new(t));
|
||||||
Ok(())
|
Ok(())
|
||||||
|
@ -115,10 +123,8 @@ where
|
||||||
Box::pin(async move {
|
Box::pin(async move {
|
||||||
// Note: Vec GroupReader always allows duplicates
|
// Note: Vec GroupReader always allows duplicates
|
||||||
|
|
||||||
let field_name = field.name().to_owned();
|
|
||||||
|
|
||||||
let vec = state
|
let vec = state
|
||||||
.entry(field_name)
|
.entry(field.form_field_name.clone())
|
||||||
.or_insert_with(|| Box::<Vec<T>>::default())
|
.or_insert_with(|| Box::<Vec<T>>::default())
|
||||||
.downcast_mut::<Vec<T>>()
|
.downcast_mut::<Vec<T>>()
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
@ -151,13 +157,13 @@ where
|
||||||
state: &'t mut State,
|
state: &'t mut State,
|
||||||
duplicate_field: DuplicateField,
|
duplicate_field: DuplicateField,
|
||||||
) -> Self::Future {
|
) -> Self::Future {
|
||||||
if state.contains_key(field.name()) {
|
if state.contains_key(&field.form_field_name) {
|
||||||
match duplicate_field {
|
match duplicate_field {
|
||||||
DuplicateField::Ignore => return Box::pin(ready(Ok(()))),
|
DuplicateField::Ignore => return Box::pin(ready(Ok(()))),
|
||||||
|
|
||||||
DuplicateField::Deny => {
|
DuplicateField::Deny => {
|
||||||
return Box::pin(ready(Err(MultipartError::DuplicateField(
|
return Box::pin(ready(Err(MultipartError::DuplicateField(
|
||||||
field.name().to_owned(),
|
field.form_field_name,
|
||||||
))))
|
))))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -166,7 +172,7 @@ where
|
||||||
}
|
}
|
||||||
|
|
||||||
Box::pin(async move {
|
Box::pin(async move {
|
||||||
let field_name = field.name().to_owned();
|
let field_name = field.form_field_name.clone();
|
||||||
let t = T::read_field(req, field, limits).await?;
|
let t = T::read_field(req, field, limits).await?;
|
||||||
state.insert(field_name, Box::new(t));
|
state.insert(field_name, Box::new(t));
|
||||||
Ok(())
|
Ok(())
|
||||||
|
@ -273,6 +279,9 @@ impl Limits {
|
||||||
/// [`MultipartCollect`] trait. You should use the [`macro@MultipartForm`] macro to derive this
|
/// [`MultipartCollect`] trait. You should use the [`macro@MultipartForm`] macro to derive this
|
||||||
/// for your struct.
|
/// for your struct.
|
||||||
///
|
///
|
||||||
|
/// Note that this extractor rejects requests with any other Content-Type such as `multipart/mixed`,
|
||||||
|
/// `multipart/related`, or non-multipart media types.
|
||||||
|
///
|
||||||
/// Add a [`MultipartFormConfig`] to your app data to configure extraction.
|
/// Add a [`MultipartFormConfig`] to your app data to configure extraction.
|
||||||
#[derive(Deref, DerefMut)]
|
#[derive(Deref, DerefMut)]
|
||||||
pub struct MultipartForm<T: MultipartCollect>(pub T);
|
pub struct MultipartForm<T: MultipartCollect>(pub T);
|
||||||
|
@ -286,14 +295,24 @@ impl<T: MultipartCollect> MultipartForm<T> {
|
||||||
|
|
||||||
impl<T> FromRequest for MultipartForm<T>
|
impl<T> FromRequest for MultipartForm<T>
|
||||||
where
|
where
|
||||||
T: MultipartCollect,
|
T: MultipartCollect + 'static,
|
||||||
{
|
{
|
||||||
type Error = Error;
|
type Error = Error;
|
||||||
type Future = LocalBoxFuture<'static, Result<Self, Self::Error>>;
|
type Future = LocalBoxFuture<'static, Result<Self, Self::Error>>;
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
fn from_request(req: &HttpRequest, payload: &mut dev::Payload) -> Self::Future {
|
fn from_request(req: &HttpRequest, payload: &mut dev::Payload) -> Self::Future {
|
||||||
let mut payload = Multipart::new(req.headers(), payload.take());
|
let mut multipart = Multipart::from_req(req, payload);
|
||||||
|
|
||||||
|
let content_type = match multipart.content_type_or_bail() {
|
||||||
|
Ok(content_type) => content_type,
|
||||||
|
Err(err) => return Box::pin(ready(Err(err.into()))),
|
||||||
|
};
|
||||||
|
|
||||||
|
if content_type.subtype() != mime::FORM_DATA {
|
||||||
|
// this extractor only supports multipart/form-data
|
||||||
|
return Box::pin(ready(Err(MultipartError::ContentTypeIncompatible.into())));
|
||||||
|
};
|
||||||
|
|
||||||
let config = MultipartFormConfig::from_req(req);
|
let config = MultipartFormConfig::from_req(req);
|
||||||
let mut limits = Limits::new(config.total_limit, config.memory_limit);
|
let mut limits = Limits::new(config.total_limit, config.memory_limit);
|
||||||
|
@ -305,14 +324,20 @@ where
|
||||||
Box::pin(
|
Box::pin(
|
||||||
async move {
|
async move {
|
||||||
let mut state = State::default();
|
let mut state = State::default();
|
||||||
// We need to ensure field limits are shared for all instances of this field name
|
|
||||||
|
// ensure limits are shared for all fields with this name
|
||||||
let mut field_limits = HashMap::<String, Option<usize>>::new();
|
let mut field_limits = HashMap::<String, Option<usize>>::new();
|
||||||
|
|
||||||
while let Some(field) = payload.try_next().await? {
|
while let Some(field) = multipart.try_next().await? {
|
||||||
|
debug_assert!(
|
||||||
|
!field.form_field_name.is_empty(),
|
||||||
|
"multipart form fields should have names",
|
||||||
|
);
|
||||||
|
|
||||||
// Retrieve the limit for this field
|
// Retrieve the limit for this field
|
||||||
let entry = field_limits
|
let entry = field_limits
|
||||||
.entry(field.name().to_owned())
|
.entry(field.form_field_name.clone())
|
||||||
.or_insert_with(|| T::limit(field.name()));
|
.or_insert_with(|| T::limit(&field.form_field_name));
|
||||||
|
|
||||||
limits.field_limit_remaining.clone_from(entry);
|
limits.field_limit_remaining.clone_from(entry);
|
||||||
|
|
||||||
|
@ -321,6 +346,7 @@ where
|
||||||
// Update the stored limit
|
// Update the stored limit
|
||||||
*entry = limits.field_limit_remaining;
|
*entry = limits.field_limit_remaining;
|
||||||
}
|
}
|
||||||
|
|
||||||
let inner = T::from_state(state)?;
|
let inner = T::from_state(state)?;
|
||||||
Ok(MultipartForm(inner))
|
Ok(MultipartForm(inner))
|
||||||
}
|
}
|
||||||
|
@ -396,11 +422,20 @@ mod tests {
|
||||||
use actix_http::encoding::Decoder;
|
use actix_http::encoding::Decoder;
|
||||||
use actix_multipart_rfc7578::client::multipart;
|
use actix_multipart_rfc7578::client::multipart;
|
||||||
use actix_test::TestServer;
|
use actix_test::TestServer;
|
||||||
use actix_web::{dev::Payload, http::StatusCode, web, App, HttpResponse, Responder};
|
use actix_web::{
|
||||||
|
dev::Payload, http::StatusCode, web, App, HttpRequest, HttpResponse, Resource, Responder,
|
||||||
|
};
|
||||||
use awc::{Client, ClientResponse};
|
use awc::{Client, ClientResponse};
|
||||||
|
use futures_core::future::LocalBoxFuture;
|
||||||
|
use futures_util::TryStreamExt as _;
|
||||||
|
|
||||||
use super::MultipartForm;
|
use super::MultipartForm;
|
||||||
use crate::form::{bytes::Bytes, tempfile::TempFile, text::Text, MultipartFormConfig};
|
use crate::{
|
||||||
|
form::{
|
||||||
|
bytes::Bytes, tempfile::TempFile, text::Text, FieldReader, Limits, MultipartFormConfig,
|
||||||
|
},
|
||||||
|
Field, MultipartError,
|
||||||
|
};
|
||||||
|
|
||||||
pub async fn send_form(
|
pub async fn send_form(
|
||||||
srv: &TestServer,
|
srv: &TestServer,
|
||||||
|
@ -734,4 +769,84 @@ mod tests {
|
||||||
let response = send_form(&srv, form, "/").await;
|
let response = send_form(&srv, form, "/").await;
|
||||||
assert_eq!(response.status(), StatusCode::BAD_REQUEST);
|
assert_eq!(response.status(), StatusCode::BAD_REQUEST);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn non_multipart_form_data() {
|
||||||
|
#[derive(MultipartForm)]
|
||||||
|
struct TestNonMultipartFormData {
|
||||||
|
#[allow(unused)]
|
||||||
|
#[multipart(limit = "30B")]
|
||||||
|
foo: Text<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn non_multipart_form_data_route(
|
||||||
|
_form: MultipartForm<TestNonMultipartFormData>,
|
||||||
|
) -> String {
|
||||||
|
unreachable!("request is sent with multipart/mixed");
|
||||||
|
}
|
||||||
|
|
||||||
|
let srv = actix_test::start(|| {
|
||||||
|
App::new().route("/", web::post().to(non_multipart_form_data_route))
|
||||||
|
});
|
||||||
|
|
||||||
|
let mut form = multipart::Form::default();
|
||||||
|
form.add_text("foo", "foo");
|
||||||
|
|
||||||
|
// mangle content-type, keeping the boundary
|
||||||
|
let ct = form.content_type().replacen("/form-data", "/mixed", 1);
|
||||||
|
|
||||||
|
let res = Client::default()
|
||||||
|
.post(srv.url("/"))
|
||||||
|
.content_type(ct)
|
||||||
|
.send_body(multipart::Body::from(form))
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
assert_eq!(res.status(), StatusCode::UNSUPPORTED_MEDIA_TYPE);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[should_panic(expected = "called `Result::unwrap()` on an `Err` value: Connect(Disconnected)")]
|
||||||
|
#[actix_web::test]
|
||||||
|
async fn field_try_next_panic() {
|
||||||
|
#[derive(Debug)]
|
||||||
|
struct NullSink;
|
||||||
|
|
||||||
|
impl<'t> FieldReader<'t> for NullSink {
|
||||||
|
type Future = LocalBoxFuture<'t, Result<Self, MultipartError>>;
|
||||||
|
|
||||||
|
fn read_field(
|
||||||
|
_: &'t HttpRequest,
|
||||||
|
mut field: Field,
|
||||||
|
_limits: &'t mut Limits,
|
||||||
|
) -> Self::Future {
|
||||||
|
Box::pin(async move {
|
||||||
|
// exhaust field stream
|
||||||
|
while let Some(_chunk) = field.try_next().await? {}
|
||||||
|
|
||||||
|
// poll again, crash
|
||||||
|
let _post = field.try_next().await;
|
||||||
|
|
||||||
|
Ok(Self)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
#[derive(MultipartForm)]
|
||||||
|
struct NullSinkForm {
|
||||||
|
foo: NullSink,
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn null_sink(_form: MultipartForm<NullSinkForm>) -> impl Responder {
|
||||||
|
"unreachable"
|
||||||
|
}
|
||||||
|
|
||||||
|
let srv = actix_test::start(|| App::new().service(Resource::new("/").post(null_sink)));
|
||||||
|
|
||||||
|
let mut form = multipart::Form::default();
|
||||||
|
form.add_text("foo", "data is not important to this test");
|
||||||
|
|
||||||
|
// panics with Err(Connect(Disconnected)) due to form NullSink panic
|
||||||
|
let _res = send_form(&srv, form, "/").await;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -7,7 +7,7 @@ use std::{
|
||||||
};
|
};
|
||||||
|
|
||||||
use actix_web::{http::StatusCode, web, Error, HttpRequest, ResponseError};
|
use actix_web::{http::StatusCode, web, Error, HttpRequest, ResponseError};
|
||||||
use derive_more::{Display, Error};
|
use derive_more::derive::{Display, Error};
|
||||||
use futures_core::future::LocalBoxFuture;
|
use futures_core::future::LocalBoxFuture;
|
||||||
use futures_util::TryStreamExt as _;
|
use futures_util::TryStreamExt as _;
|
||||||
use mime::Mime;
|
use mime::Mime;
|
||||||
|
@ -42,38 +42,36 @@ impl<'t> FieldReader<'t> for TempFile {
|
||||||
fn read_field(req: &'t HttpRequest, mut field: Field, limits: &'t mut Limits) -> Self::Future {
|
fn read_field(req: &'t HttpRequest, mut field: Field, limits: &'t mut Limits) -> Self::Future {
|
||||||
Box::pin(async move {
|
Box::pin(async move {
|
||||||
let config = TempFileConfig::from_req(req);
|
let config = TempFileConfig::from_req(req);
|
||||||
let field_name = field.name().to_owned();
|
|
||||||
let mut size = 0;
|
let mut size = 0;
|
||||||
|
|
||||||
let file = config
|
let file = config.create_tempfile().map_err(|err| {
|
||||||
.create_tempfile()
|
config.map_error(req, &field.form_field_name, TempFileError::FileIo(err))
|
||||||
.map_err(|err| config.map_error(req, &field_name, TempFileError::FileIo(err)))?;
|
})?;
|
||||||
|
|
||||||
let mut file_async =
|
let mut file_async = tokio::fs::File::from_std(file.reopen().map_err(|err| {
|
||||||
tokio::fs::File::from_std(file.reopen().map_err(|err| {
|
config.map_error(req, &field.form_field_name, TempFileError::FileIo(err))
|
||||||
config.map_error(req, &field_name, TempFileError::FileIo(err))
|
})?);
|
||||||
})?);
|
|
||||||
|
|
||||||
while let Some(chunk) = field.try_next().await? {
|
while let Some(chunk) = field.try_next().await? {
|
||||||
limits.try_consume_limits(chunk.len(), false)?;
|
limits.try_consume_limits(chunk.len(), false)?;
|
||||||
size += chunk.len();
|
size += chunk.len();
|
||||||
file_async.write_all(chunk.as_ref()).await.map_err(|err| {
|
file_async.write_all(chunk.as_ref()).await.map_err(|err| {
|
||||||
config.map_error(req, &field_name, TempFileError::FileIo(err))
|
config.map_error(req, &field.form_field_name, TempFileError::FileIo(err))
|
||||||
})?;
|
})?;
|
||||||
}
|
}
|
||||||
|
|
||||||
file_async
|
file_async.flush().await.map_err(|err| {
|
||||||
.flush()
|
config.map_error(req, &field.form_field_name, TempFileError::FileIo(err))
|
||||||
.await
|
})?;
|
||||||
.map_err(|err| config.map_error(req, &field_name, TempFileError::FileIo(err)))?;
|
|
||||||
|
|
||||||
Ok(TempFile {
|
Ok(TempFile {
|
||||||
file,
|
file,
|
||||||
content_type: field.content_type().map(ToOwned::to_owned),
|
content_type: field.content_type().map(ToOwned::to_owned),
|
||||||
file_name: field
|
file_name: field
|
||||||
.content_disposition()
|
.content_disposition()
|
||||||
|
.expect("multipart form fields should have a content-disposition header")
|
||||||
.get_filename()
|
.get_filename()
|
||||||
.map(str::to_owned),
|
.map(ToOwned::to_owned),
|
||||||
size,
|
size,
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
@ -84,7 +82,7 @@ impl<'t> FieldReader<'t> for TempFile {
|
||||||
#[non_exhaustive]
|
#[non_exhaustive]
|
||||||
pub enum TempFileError {
|
pub enum TempFileError {
|
||||||
/// File I/O Error
|
/// File I/O Error
|
||||||
#[display(fmt = "File I/O error: {}", _0)]
|
#[display("File I/O error: {}", _0)]
|
||||||
FileIo(std::io::Error),
|
FileIo(std::io::Error),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -137,7 +135,7 @@ impl TempFileConfig {
|
||||||
};
|
};
|
||||||
|
|
||||||
MultipartError::Field {
|
MultipartError::Field {
|
||||||
field_name: field_name.to_owned(),
|
name: field_name.to_owned(),
|
||||||
source,
|
source,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
use std::{str, sync::Arc};
|
use std::{str, sync::Arc};
|
||||||
|
|
||||||
use actix_web::{http::StatusCode, web, Error, HttpRequest, ResponseError};
|
use actix_web::{http::StatusCode, web, Error, HttpRequest, ResponseError};
|
||||||
use derive_more::{Deref, DerefMut, Display, Error};
|
use derive_more::derive::{Deref, DerefMut, Display, Error};
|
||||||
use futures_core::future::LocalBoxFuture;
|
use futures_core::future::LocalBoxFuture;
|
||||||
use serde::de::DeserializeOwned;
|
use serde::de::DeserializeOwned;
|
||||||
|
|
||||||
|
@ -36,7 +36,6 @@ where
|
||||||
fn read_field(req: &'t HttpRequest, field: Field, limits: &'t mut Limits) -> Self::Future {
|
fn read_field(req: &'t HttpRequest, field: Field, limits: &'t mut Limits) -> Self::Future {
|
||||||
Box::pin(async move {
|
Box::pin(async move {
|
||||||
let config = TextConfig::from_req(req);
|
let config = TextConfig::from_req(req);
|
||||||
let field_name = field.name().to_owned();
|
|
||||||
|
|
||||||
if config.validate_content_type {
|
if config.validate_content_type {
|
||||||
let valid = if let Some(mime) = field.content_type() {
|
let valid = if let Some(mime) = field.content_type() {
|
||||||
|
@ -49,22 +48,24 @@ where
|
||||||
|
|
||||||
if !valid {
|
if !valid {
|
||||||
return Err(MultipartError::Field {
|
return Err(MultipartError::Field {
|
||||||
field_name,
|
name: field.form_field_name,
|
||||||
source: config.map_error(req, TextError::ContentType),
|
source: config.map_error(req, TextError::ContentType),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let form_field_name = field.form_field_name.clone();
|
||||||
|
|
||||||
let bytes = Bytes::read_field(req, field, limits).await?;
|
let bytes = Bytes::read_field(req, field, limits).await?;
|
||||||
|
|
||||||
let text = str::from_utf8(&bytes.data).map_err(|err| MultipartError::Field {
|
let text = str::from_utf8(&bytes.data).map_err(|err| MultipartError::Field {
|
||||||
field_name: field_name.clone(),
|
name: form_field_name.clone(),
|
||||||
source: config.map_error(req, TextError::Utf8Error(err)),
|
source: config.map_error(req, TextError::Utf8Error(err)),
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
Ok(Text(serde_plain::from_str(text).map_err(|err| {
|
Ok(Text(serde_plain::from_str(text).map_err(|err| {
|
||||||
MultipartError::Field {
|
MultipartError::Field {
|
||||||
field_name,
|
name: form_field_name,
|
||||||
source: config.map_error(req, TextError::Deserialize(err)),
|
source: config.map_error(req, TextError::Deserialize(err)),
|
||||||
}
|
}
|
||||||
})?))
|
})?))
|
||||||
|
@ -76,15 +77,15 @@ where
|
||||||
#[non_exhaustive]
|
#[non_exhaustive]
|
||||||
pub enum TextError {
|
pub enum TextError {
|
||||||
/// UTF-8 decoding error.
|
/// UTF-8 decoding error.
|
||||||
#[display(fmt = "UTF-8 decoding error: {}", _0)]
|
#[display("UTF-8 decoding error: {}", _0)]
|
||||||
Utf8Error(str::Utf8Error),
|
Utf8Error(str::Utf8Error),
|
||||||
|
|
||||||
/// Deserialize error.
|
/// Deserialize error.
|
||||||
#[display(fmt = "Plain text deserialize error: {}", _0)]
|
#[display("Plain text deserialize error: {}", _0)]
|
||||||
Deserialize(serde_plain::Error),
|
Deserialize(serde_plain::Error),
|
||||||
|
|
||||||
/// Content type error.
|
/// Content type error.
|
||||||
#[display(fmt = "Content type error")]
|
#[display("Content type error")]
|
||||||
ContentType,
|
ContentType,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,9 +1,19 @@
|
||||||
//! Multipart form support for Actix Web.
|
//! Multipart request & form support for Actix Web.
|
||||||
|
//!
|
||||||
|
//! The [`Multipart`] extractor aims to support all kinds of `multipart/*` requests, including
|
||||||
|
//! `multipart/form-data`, `multipart/related` and `multipart/mixed`. This is a lower-level
|
||||||
|
//! extractor which supports reading [multipart fields](Field), in the order they are sent by the
|
||||||
|
//! client.
|
||||||
|
//!
|
||||||
|
//! Due to additional requirements for `multipart/form-data` requests, the higher level
|
||||||
|
//! [`MultipartForm`] extractor and derive macro only supports this media type.
|
||||||
|
//!
|
||||||
//! # Examples
|
//! # Examples
|
||||||
|
//!
|
||||||
//! ```no_run
|
//! ```no_run
|
||||||
//! use actix_web::{post, App, HttpServer, Responder};
|
//! use actix_web::{post, App, HttpServer, Responder};
|
||||||
//!
|
//!
|
||||||
//! use actix_multipart::form::{json::Json as MPJson, tempfile::TempFile, MultipartForm};
|
//! use actix_multipart::form::{json::Json as MpJson, tempfile::TempFile, MultipartForm};
|
||||||
//! use serde::Deserialize;
|
//! use serde::Deserialize;
|
||||||
//!
|
//!
|
||||||
//! #[derive(Debug, Deserialize)]
|
//! #[derive(Debug, Deserialize)]
|
||||||
|
@ -15,7 +25,7 @@
|
||||||
//! struct UploadForm {
|
//! struct UploadForm {
|
||||||
//! #[multipart(limit = "100MB")]
|
//! #[multipart(limit = "100MB")]
|
||||||
//! file: TempFile,
|
//! file: TempFile,
|
||||||
//! json: MPJson<Metadata>,
|
//! json: MpJson<Metadata>,
|
||||||
//! }
|
//! }
|
||||||
//!
|
//!
|
||||||
//! #[post("/videos")]
|
//! #[post("/videos")]
|
||||||
|
@ -34,10 +44,18 @@
|
||||||
//! .await
|
//! .await
|
||||||
//! }
|
//! }
|
||||||
//! ```
|
//! ```
|
||||||
|
//!
|
||||||
|
//! cURL request:
|
||||||
|
//!
|
||||||
|
//! ```sh
|
||||||
|
//! curl -v --request POST \
|
||||||
|
//! --url http://localhost:8080/videos \
|
||||||
|
//! -F 'json={"name": "Cargo.lock"};type=application/json' \
|
||||||
|
//! -F file=@./Cargo.lock
|
||||||
|
//! ```
|
||||||
|
//!
|
||||||
|
//! [`MultipartForm`]: struct@form::MultipartForm
|
||||||
|
|
||||||
#![deny(rust_2018_idioms, nonstandard_style)]
|
|
||||||
#![warn(future_incompatible)]
|
|
||||||
#![allow(clippy::borrow_interior_mutable_const)]
|
|
||||||
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
||||||
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
||||||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||||
|
@ -48,14 +66,15 @@ extern crate self as actix_multipart;
|
||||||
|
|
||||||
mod error;
|
mod error;
|
||||||
mod extractor;
|
mod extractor;
|
||||||
|
pub(crate) mod field;
|
||||||
pub mod form;
|
pub mod form;
|
||||||
mod server;
|
mod multipart;
|
||||||
|
pub(crate) mod payload;
|
||||||
|
pub(crate) mod safety;
|
||||||
pub mod test;
|
pub mod test;
|
||||||
|
|
||||||
pub use self::{
|
pub use self::{
|
||||||
error::MultipartError,
|
error::Error as MultipartError,
|
||||||
server::{Field, Multipart},
|
field::{Field, LimitExceeded},
|
||||||
test::{
|
multipart::Multipart,
|
||||||
create_form_data_payload_and_headers, create_form_data_payload_and_headers_with_boundary,
|
|
||||||
},
|
|
||||||
};
|
};
|
||||||
|
|
883
actix-multipart/src/multipart.rs
Normal file
883
actix-multipart/src/multipart.rs
Normal file
|
@ -0,0 +1,883 @@
|
||||||
|
//! Multipart response payload support.
|
||||||
|
|
||||||
|
use std::{
|
||||||
|
cell::RefCell,
|
||||||
|
pin::Pin,
|
||||||
|
rc::Rc,
|
||||||
|
task::{Context, Poll},
|
||||||
|
};
|
||||||
|
|
||||||
|
use actix_web::{
|
||||||
|
dev,
|
||||||
|
error::{ParseError, PayloadError},
|
||||||
|
http::header::{self, ContentDisposition, HeaderMap, HeaderName, HeaderValue},
|
||||||
|
web::Bytes,
|
||||||
|
HttpRequest,
|
||||||
|
};
|
||||||
|
use futures_core::stream::Stream;
|
||||||
|
use mime::Mime;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
error::Error,
|
||||||
|
field::InnerField,
|
||||||
|
payload::{PayloadBuffer, PayloadRef},
|
||||||
|
safety::Safety,
|
||||||
|
Field,
|
||||||
|
};
|
||||||
|
|
||||||
|
const MAX_HEADERS: usize = 32;
|
||||||
|
|
||||||
|
/// The server-side implementation of `multipart/form-data` requests.
|
||||||
|
///
|
||||||
|
/// This will parse the incoming stream into `MultipartItem` instances via its `Stream`
|
||||||
|
/// implementation. `MultipartItem::Field` contains multipart field. `MultipartItem::Multipart` is
|
||||||
|
/// used for nested multipart streams.
|
||||||
|
pub struct Multipart {
|
||||||
|
flow: Flow,
|
||||||
|
safety: Safety,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Multipart stream mode: actively parsing, or holding an error to emit once.
enum Flow {
    /// Parsing is in progress with live state.
    InFlight(Inner),

    /// Error container is Some until an error is returned out of the flow.
    Error(Option<Error>),
}
|
||||||
|
|
||||||
|
impl Multipart {
    /// Creates multipart instance from parts.
    ///
    /// If the headers lack a parsable `multipart/*` Content-Type with a boundary parameter, the
    /// error is stored and yielded on the first poll of the stream (see [`Multipart::from_error`]).
    pub fn new<S>(headers: &HeaderMap, stream: S) -> Self
    where
        S: Stream<Item = Result<Bytes, PayloadError>> + 'static,
    {
        match Self::find_ct_and_boundary(headers) {
            Ok((ct, boundary)) => Self::from_ct_and_boundary(ct, boundary, stream),
            Err(err) => Self::from_error(err),
        }
    }

    /// Creates multipart instance from parts.
    ///
    /// Takes ownership of the request payload; on header errors the payload is left untouched.
    pub(crate) fn from_req(req: &HttpRequest, payload: &mut dev::Payload) -> Self {
        match Self::find_ct_and_boundary(req.headers()) {
            Ok((ct, boundary)) => Self::from_ct_and_boundary(ct, boundary, payload.take()),
            Err(err) => Self::from_error(err),
        }
    }

    /// Extract Content-Type and boundary info from headers.
    ///
    /// Errors (in order of checking): `ContentTypeMissing` when the header is absent,
    /// `ContentTypeParse` when it is not a valid MIME type, `ContentTypeIncompatible` when the
    /// top-level type is not `multipart`, and `BoundaryMissing` when the boundary param is absent.
    pub(crate) fn find_ct_and_boundary(headers: &HeaderMap) -> Result<(Mime, String), Error> {
        let content_type = headers
            .get(&header::CONTENT_TYPE)
            .ok_or(Error::ContentTypeMissing)?
            .to_str()
            .ok()
            .and_then(|content_type| content_type.parse::<Mime>().ok())
            .ok_or(Error::ContentTypeParse)?;

        if content_type.type_() != mime::MULTIPART {
            return Err(Error::ContentTypeIncompatible);
        }

        let boundary = content_type
            .get_param(mime::BOUNDARY)
            .ok_or(Error::BoundaryMissing)?
            .as_str()
            .to_owned();

        Ok((content_type, boundary))
    }

    /// Constructs a new multipart reader from given Content-Type, boundary, and stream.
    pub(crate) fn from_ct_and_boundary<S>(ct: Mime, boundary: String, stream: S) -> Multipart
    where
        S: Stream<Item = Result<Bytes, PayloadError>> + 'static,
    {
        Multipart {
            safety: Safety::new(),
            flow: Flow::InFlight(Inner {
                payload: PayloadRef::new(PayloadBuffer::new(stream)),
                content_type: ct,
                boundary,
                // parsing starts by skipping any preamble before the first boundary
                state: State::FirstBoundary,
                item: Item::None,
            }),
        }
    }

    /// Constructs a new multipart reader from given `MultipartError`.
    ///
    /// The stored error is yielded the first time the stream is polled.
    pub(crate) fn from_error(err: Error) -> Multipart {
        Multipart {
            flow: Flow::Error(Some(err)),
            safety: Safety::new(),
        }
    }

    /// Return requests parsed Content-Type or raise the stored error.
    ///
    /// # Panics
    ///
    /// Panics if called again after the stored error has already been taken.
    pub(crate) fn content_type_or_bail(&mut self) -> Result<mime::Mime, Error> {
        match self.flow {
            Flow::InFlight(ref inner) => Ok(inner.content_type.clone()),
            Flow::Error(ref mut err) => Err(err
                .take()
                .expect("error should not be taken after it was returned")),
        }
    }
}
|
||||||
|
|
||||||
|
impl Stream for Multipart {
    type Item = Result<Field, Error>;

    /// Yields the next [`Field`] of the request, or the stored construction error.
    ///
    /// Top-level flow: refill the payload buffer (only when this task holds the safety guard),
    /// then delegate parsing to [`Inner::poll`].
    fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        let this = self.get_mut();

        match this.flow {
            Flow::InFlight(ref mut inner) => {
                if let Some(mut buffer) = inner.payload.get_mut(&this.safety) {
                    // check safety and poll read payload to buffer.
                    buffer.poll_stream(cx)?;
                } else if !this.safety.is_clean() {
                    // safety violation
                    return Poll::Ready(Some(Err(Error::NotConsumed)));
                } else {
                    // another handle currently owns the payload; wait until it is released
                    return Poll::Pending;
                }

                inner.poll(&this.safety, cx)
            }

            // construction failed earlier; emit the error exactly once
            Flow::Error(ref mut err) => Poll::Ready(Some(Err(err
                .take()
                .expect("Multipart polled after finish")))),
        }
    }
}
|
||||||
|
|
||||||
|
/// Progress of the multipart parser through the request body.
#[derive(PartialEq, Debug)]
enum State {
    /// Skip data until first boundary.
    FirstBoundary,

    /// Reading boundary.
    Boundary,

    /// Reading Headers.
    Headers,

    /// Stream EOF.
    Eof,
}
|
||||||
|
|
||||||
|
/// The field currently being streamed out of the multipart body, if any.
enum Item {
    /// No field is in flight.
    None,
    /// In-flight field state, shared with the public `Field` handle via `Rc`.
    Field(Rc<RefCell<InnerField>>),
}
|
||||||
|
|
||||||
|
/// Live parsing state for an in-flight multipart request.
struct Inner {
    /// Request's payload stream & buffer.
    payload: PayloadRef,

    /// Request's Content-Type.
    ///
    /// Guaranteed to have "multipart" top-level media type, i.e., `multipart/*`.
    content_type: Mime,

    /// Field boundary.
    boundary: String,

    /// Current parser position within the body.
    state: State,
    /// Field currently being streamed, kept so it can be drained before the next one starts.
    item: Item,
}
|
||||||
|
|
||||||
|
impl Inner {
    /// Reads and parses one field's header block (terminated by a blank line).
    ///
    /// Returns:
    ///
    /// - `Ok(Some(headers))` - complete header block parsed
    /// - `Ok(None)` - header terminator not buffered yet; more data needed
    /// - `Err(Incomplete)` - stream ended before the header block completed
    /// - `Err(ParseError::Header)` - malformed header name/value or truncated parse
    fn read_field_headers(payload: &mut PayloadBuffer) -> Result<Option<HeaderMap>, Error> {
        match payload.read_until(b"\r\n\r\n")? {
            None => {
                if payload.eof {
                    Err(Error::Incomplete)
                } else {
                    Ok(None)
                }
            }

            Some(bytes) => {
                // fixed-size scratch array for httparse; MAX_HEADERS caps fields' header count
                let mut hdrs = [httparse::EMPTY_HEADER; MAX_HEADERS];

                match httparse::parse_headers(&bytes, &mut hdrs).map_err(ParseError::from)? {
                    httparse::Status::Complete((_, hdrs)) => {
                        // convert headers
                        let mut headers = HeaderMap::with_capacity(hdrs.len());

                        for h in hdrs {
                            let name =
                                HeaderName::try_from(h.name).map_err(|_| ParseError::Header)?;
                            let value =
                                HeaderValue::try_from(h.value).map_err(|_| ParseError::Header)?;
                            headers.append(name, value);
                        }

                        Ok(Some(headers))
                    }

                    // chunk ended with "\r\n\r\n", so a partial parse means malformed headers
                    httparse::Status::Partial => Err(ParseError::Header.into()),
                }
            }
        }
    }

    /// Reads a field boundary from the payload buffer (and discards it).
    ///
    /// Reads "in-between" and "final" boundaries. E.g. for boundary = "foo":
    ///
    /// ```plain
    /// --foo <-- in-between fields
    /// --foo-- <-- end of request body, should be followed by EOF
    /// ```
    ///
    /// Returns:
    ///
    /// - `Ok(Some(true))` - final field boundary read (EOF)
    /// - `Ok(Some(false))` - field boundary read
    /// - `Ok(None)` - boundary not found, more data needs reading
    /// - `Err(BoundaryMissing)` - multipart boundary is missing
    fn read_boundary(payload: &mut PayloadBuffer, boundary: &str) -> Result<Option<bool>, Error> {
        // TODO: need to read epilogue
        let chunk = match payload.readline_or_eof()? {
            // TODO: this might be okay as a let Some() else return Ok(None)
            None => return Ok(payload.eof.then_some(true)),
            Some(chunk) => chunk,
        };

        const BOUNDARY_MARKER: &[u8] = b"--";
        const LINE_BREAK: &[u8] = b"\r\n";

        let boundary_len = boundary.len();

        if chunk.len() < boundary_len + 2 + 2
            || !chunk.starts_with(BOUNDARY_MARKER)
            || &chunk[2..boundary_len + 2] != boundary.as_bytes()
        {
            return Err(Error::BoundaryMissing);
        }

        // chunk facts:
        // - long enough to contain boundary + 2 markers or 1 marker and line-break
        // - starts with boundary marker
        // - chunk contains correct boundary

        if &chunk[boundary_len + 2..] == LINE_BREAK {
            // boundary is followed by line-break, indicating more fields to come
            return Ok(Some(false));
        }

        // boundary is followed by marker
        if &chunk[boundary_len + 2..boundary_len + 4] == BOUNDARY_MARKER
            && (
                // chunk is exactly boundary len + 2 markers
                chunk.len() == boundary_len + 2 + 2
                // final boundary is allowed to end with a line-break
                || &chunk[boundary_len + 4..] == LINE_BREAK
            )
        {
            return Ok(Some(true));
        }

        Err(Error::BoundaryMissing)
    }

    /// Discards preamble lines until the first boundary line is found.
    ///
    /// Returns `Ok(Some(true))` when the *final* boundary was hit immediately (empty body),
    /// `Ok(Some(false))` for a normal first boundary, `Ok(None)` when more data is needed, and
    /// `Err(Incomplete)` on premature EOF.
    fn skip_until_boundary(
        payload: &mut PayloadBuffer,
        boundary: &str,
    ) -> Result<Option<bool>, Error> {
        let mut eof = false;

        loop {
            match payload.readline()? {
                Some(chunk) => {
                    if chunk.is_empty() {
                        return Err(Error::BoundaryMissing);
                    }
                    if chunk.len() < boundary.len() {
                        continue;
                    }
                    // "--{boundary}\r\n" — slice drops the 2-byte leading marker and trailing CRLF
                    if &chunk[..2] == b"--" && &chunk[2..chunk.len() - 2] == boundary.as_bytes() {
                        break;
                    } else {
                        if chunk.len() < boundary.len() + 2 {
                            continue;
                        }
                        let b: &[u8] = boundary.as_ref();
                        // "{boundary}--" — final boundary form accepted here without the
                        // leading marker; NOTE(review): asymmetric with the branch above — confirm
                        // this matches the intended grammar.
                        if &chunk[..boundary.len()] == b
                            && &chunk[boundary.len()..boundary.len() + 2] == b"--"
                        {
                            eof = true;
                            break;
                        }
                    }
                }
                None => {
                    return if payload.eof {
                        Err(Error::Incomplete)
                    } else {
                        Ok(None)
                    };
                }
            }
        }
        Ok(Some(eof))
    }

    /// Drives the parser: drains the in-flight field (if any), reads the next boundary and field
    /// headers, then yields the next [`Field`] or terminates the stream.
    ///
    /// Called from `<Multipart as Stream>::poll_next` after the payload buffer has been refilled.
    fn poll(&mut self, safety: &Safety, cx: &Context<'_>) -> Poll<Option<Result<Field, Error>>> {
        if self.state == State::Eof {
            Poll::Ready(None)
        } else {
            // release field
            loop {
                // Nested multipart streams of fields has to be consumed
                // before switching to next
                //
                // NOTE(review): if `safety.current()` is false this loop would spin; the caller's
                // safety checks in `poll_next` appear to prevent reaching here in that state —
                // confirm.
                if safety.current() {
                    let stop = match self.item {
                        // keep polling the in-flight field until it is fully consumed
                        Item::Field(ref mut field) => match field.borrow_mut().poll(safety) {
                            Poll::Pending => return Poll::Pending,
                            Poll::Ready(Some(Ok(_))) => continue,
                            Poll::Ready(Some(Err(err))) => return Poll::Ready(Some(Err(err))),
                            Poll::Ready(None) => true,
                        },
                        Item::None => false,
                    };
                    if stop {
                        self.item = Item::None;
                    }
                    if let Item::None = self.item {
                        break;
                    }
                }
            }

            let field_headers = if let Some(mut payload) = self.payload.get_mut(safety) {
                match self.state {
                    // read until first boundary
                    State::FirstBoundary => {
                        match Inner::skip_until_boundary(&mut payload, &self.boundary)? {
                            None => return Poll::Pending,
                            Some(eof) => {
                                if eof {
                                    self.state = State::Eof;
                                    return Poll::Ready(None);
                                } else {
                                    self.state = State::Headers;
                                }
                            }
                        }
                    }

                    // read boundary
                    State::Boundary => match Inner::read_boundary(&mut payload, &self.boundary)? {
                        None => return Poll::Pending,
                        Some(eof) => {
                            if eof {
                                self.state = State::Eof;
                                return Poll::Ready(None);
                            } else {
                                self.state = State::Headers;
                            }
                        }
                    },

                    _ => {}
                }

                // read field headers for next field
                if self.state == State::Headers {
                    if let Some(headers) = Inner::read_field_headers(&mut payload)? {
                        self.state = State::Boundary;
                        headers
                    } else {
                        return Poll::Pending;
                    }
                } else {
                    // both boundary arms above either return or set State::Headers
                    unreachable!()
                }
            } else {
                log::debug!("NotReady: field is in flight");
                return Poll::Pending;
            };

            let field_content_disposition = field_headers
                .get(&header::CONTENT_DISPOSITION)
                .and_then(|cd| ContentDisposition::from_raw(cd).ok())
                // only "form-data" dispositions are surfaced; others are treated as absent
                .filter(|content_disposition| {
                    matches!(
                        content_disposition.disposition,
                        header::DispositionType::FormData,
                    )
                });

            let form_field_name = if self.content_type.subtype() == mime::FORM_DATA {
                // According to RFC 7578 §4.2, which relates to "multipart/form-data" requests
                // specifically, fields must have a Content-Disposition header, its disposition
                // type must be set as "form-data", and it must have a name parameter.

                let Some(cd) = &field_content_disposition else {
                    return Poll::Ready(Some(Err(Error::ContentDispositionMissing)));
                };

                let Some(field_name) = cd.get_name() else {
                    return Poll::Ready(Some(Err(Error::ContentDispositionNameMissing)));
                };

                Some(field_name.to_owned())
            } else {
                None
            };

            // TODO: check out other multipart/* RFCs for specific requirements

            // unparsable Content-Type values are treated as absent
            let field_content_type: Option<Mime> = field_headers
                .get(&header::CONTENT_TYPE)
                .and_then(|ct| ct.to_str().ok())
                .and_then(|ct| ct.parse().ok());

            self.state = State::Boundary;

            // nested multipart stream is not supported
            if let Some(mime) = &field_content_type {
                if mime.type_() == mime::MULTIPART {
                    return Poll::Ready(Some(Err(Error::Nested)));
                }
            }

            let field_inner =
                InnerField::new_in_rc(self.payload.clone(), self.boundary.clone(), &field_headers)?;

            self.item = Item::Field(Rc::clone(&field_inner));

            Poll::Ready(Some(Ok(Field::new(
                field_content_type,
                field_content_disposition,
                form_field_name,
                field_headers,
                safety.clone(cx),
                field_inner,
            ))))
        }
    }
}
|
||||||
|
|
||||||
|
impl Drop for Inner {
    fn drop(&mut self) {
        // InnerMultipartItem::Field has to be dropped first because of Safety.
        // Clearing `item` here forces that ordering before the remaining fields drop.
        self.item = Item::None;
    }
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use std::time::Duration;

    use actix_http::h1;
    use actix_web::{
        http::header::{DispositionParam, DispositionType},
        rt,
        test::TestRequest,
        web::{BufMut as _, BytesMut},
        FromRequest,
    };
    use assert_matches::assert_matches;
    use futures_test::stream::StreamTestExt as _;
    use futures_util::{stream, StreamExt as _};
    use tokio::sync::mpsc;
    use tokio_stream::wrappers::UnboundedReceiverStream;

    use super::*;

    // Shared boundary token used by the hand-written fixture payloads below.
    const BOUNDARY: &str = "abbc761f78ff4d7cb7573b5a23f96ef0";

    // Exercises every error arm of `find_ct_and_boundary`, then the happy path.
    #[actix_rt::test]
    async fn test_boundary() {
        let headers = HeaderMap::new();
        match Multipart::find_ct_and_boundary(&headers) {
            Err(Error::ContentTypeMissing) => {}
            _ => unreachable!("should not happen"),
        }

        let mut headers = HeaderMap::new();
        headers.insert(
            header::CONTENT_TYPE,
            header::HeaderValue::from_static("test"),
        );

        match Multipart::find_ct_and_boundary(&headers) {
            Err(Error::ContentTypeParse) => {}
            _ => unreachable!("should not happen"),
        }

        let mut headers = HeaderMap::new();
        headers.insert(
            header::CONTENT_TYPE,
            header::HeaderValue::from_static("multipart/mixed"),
        );
        match Multipart::find_ct_and_boundary(&headers) {
            Err(Error::BoundaryMissing) => {}
            _ => unreachable!("should not happen"),
        }

        let mut headers = HeaderMap::new();
        headers.insert(
            header::CONTENT_TYPE,
            header::HeaderValue::from_static(
                "multipart/mixed; boundary=\"5c02368e880e436dab70ed54e1c58209\"",
            ),
        );

        assert_eq!(
            Multipart::find_ct_and_boundary(&headers).unwrap().1,
            "5c02368e880e436dab70ed54e1c58209",
        );
    }

    // Channel-backed payload stream; dropping the sender signals EOF.
    fn create_stream() -> (
        mpsc::UnboundedSender<Result<Bytes, PayloadError>>,
        impl Stream<Item = Result<Bytes, PayloadError>>,
    ) {
        let (tx, rx) = mpsc::unbounded_channel();

        (
            tx,
            UnboundedReceiverStream::new(rx).map(|res| res.map_err(|_| panic!())),
        )
    }

    // Single-field form-data request built via the public test utility.
    fn create_simple_request_with_header() -> (Bytes, HeaderMap) {
        let (body, headers) = crate::test::create_form_data_payload_and_headers_with_boundary(
            BOUNDARY,
            "file",
            Some("fn.txt".to_owned()),
            Some(mime::TEXT_PLAIN_UTF_8),
            Bytes::from_static(b"data"),
        );

        let mut buf = BytesMut::with_capacity(body.len() + 14);

        // add junk before form to test pre-boundary data rejection
        buf.put("testasdadsad\r\n".as_bytes());

        buf.put(body);

        (buf.freeze(), headers)
    }

    // TODO: use test utility when multi-file support is introduced
    fn create_double_request_with_header() -> (Bytes, HeaderMap) {
        let bytes = Bytes::from(
            "testasdadsad\r\n\
             --abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
             Content-Disposition: form-data; name=\"file\"; filename=\"fn.txt\"\r\n\
             Content-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\n\
             test\r\n\
             --abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
             Content-Disposition: form-data; name=\"file\"; filename=\"fn.txt\"\r\n\
             Content-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\n\
             data\r\n\
             --abbc761f78ff4d7cb7573b5a23f96ef0--\r\n",
        );
        let mut headers = HeaderMap::new();
        headers.insert(
            header::CONTENT_TYPE,
            header::HeaderValue::from_static(
                "multipart/mixed; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
            ),
        );
        (bytes, headers)
    }

    #[actix_rt::test]
    async fn test_multipart_no_end_crlf() {
        let (sender, payload) = create_stream();
        let (mut bytes, headers) = create_double_request_with_header();
        // NOTE(review): `split_to(bytes.len())` takes the whole buffer, so nothing is actually
        // stripped despite the comment — confirm intent.
        let bytes_stripped = bytes.split_to(bytes.len()); // strip crlf

        sender.send(Ok(bytes_stripped)).unwrap();
        drop(sender); // eof

        let mut multipart = Multipart::new(&headers, payload);

        match multipart.next().await.unwrap() {
            Ok(_) => {}
            _ => unreachable!(),
        }

        match multipart.next().await.unwrap() {
            Ok(_) => {}
            _ => unreachable!(),
        }

        match multipart.next().await {
            None => {}
            _ => unreachable!(),
        }
    }

    // Happy-path parse of two fields followed by stream termination.
    #[actix_rt::test]
    async fn test_multipart() {
        let (sender, payload) = create_stream();
        let (bytes, headers) = create_double_request_with_header();

        sender.send(Ok(bytes)).unwrap();

        let mut multipart = Multipart::new(&headers, payload);
        match multipart.next().await {
            Some(Ok(mut field)) => {
                let cd = field.content_disposition().unwrap();
                assert_eq!(cd.disposition, DispositionType::FormData);
                assert_eq!(cd.parameters[0], DispositionParam::Name("file".into()));

                assert_eq!(field.content_type().unwrap().type_(), mime::TEXT);
                assert_eq!(field.content_type().unwrap().subtype(), mime::PLAIN);

                match field.next().await.unwrap() {
                    Ok(chunk) => assert_eq!(chunk, "test"),
                    _ => unreachable!(),
                }
                match field.next().await {
                    None => {}
                    _ => unreachable!(),
                }
            }
            _ => unreachable!(),
        }

        match multipart.next().await.unwrap() {
            Ok(mut field) => {
                assert_eq!(field.content_type().unwrap().type_(), mime::TEXT);
                assert_eq!(field.content_type().unwrap().subtype(), mime::PLAIN);

                match field.next().await {
                    Some(Ok(chunk)) => assert_eq!(chunk, "data"),
                    _ => unreachable!(),
                }
                match field.next().await {
                    None => {}
                    _ => unreachable!(),
                }
            }
            _ => unreachable!(),
        }

        match multipart.next().await {
            None => {}
            _ => unreachable!(),
        }
    }

    // Loops, collecting all bytes until end-of-field
    async fn get_whole_field(field: &mut Field) -> BytesMut {
        let mut b = BytesMut::new();
        loop {
            match field.next().await {
                Some(Ok(chunk)) => b.extend_from_slice(&chunk),
                None => return b,
                _ => unreachable!(),
            }
        }
    }

    // Same as test_multipart but delivers the payload one byte at a time with interleaved
    // pendings to exercise the incremental parsing paths.
    #[actix_rt::test]
    async fn test_stream() {
        let (bytes, headers) = create_double_request_with_header();
        let payload = stream::iter(bytes)
            .map(|byte| Ok(Bytes::copy_from_slice(&[byte])))
            .interleave_pending();

        let mut multipart = Multipart::new(&headers, payload);
        match multipart.next().await.unwrap() {
            Ok(mut field) => {
                let cd = field.content_disposition().unwrap();
                assert_eq!(cd.disposition, DispositionType::FormData);
                assert_eq!(cd.parameters[0], DispositionParam::Name("file".into()));

                assert_eq!(field.content_type().unwrap().type_(), mime::TEXT);
                assert_eq!(field.content_type().unwrap().subtype(), mime::PLAIN);

                assert_eq!(get_whole_field(&mut field).await, "test");
            }
            _ => unreachable!(),
        }

        match multipart.next().await {
            Some(Ok(mut field)) => {
                assert_eq!(field.content_type().unwrap().type_(), mime::TEXT);
                assert_eq!(field.content_type().unwrap().subtype(), mime::PLAIN);

                assert_eq!(get_whole_field(&mut field).await, "data");
            }
            _ => unreachable!(),
        }

        match multipart.next().await {
            None => {}
            _ => unreachable!(),
        }
    }

    // A constructor-time error must surface on the first poll.
    #[actix_rt::test]
    async fn test_multipart_from_error() {
        let err = Error::ContentTypeMissing;
        let mut multipart = Multipart::from_error(err);
        assert!(multipart.next().await.unwrap().is_err())
    }

    // Construction from pre-extracted Content-Type + boundary must succeed.
    #[actix_rt::test]
    async fn test_multipart_from_boundary() {
        let (_, payload) = create_stream();
        let (_, headers) = create_simple_request_with_header();
        let (ct, boundary) = Multipart::find_ct_and_boundary(&headers).unwrap();
        let _ = Multipart::from_ct_and_boundary(ct, boundary, payload);
    }

    #[actix_rt::test]
    async fn test_multipart_payload_consumption() {
        // with sample payload and HttpRequest with no headers
        let (_, inner_payload) = h1::Payload::create(false);
        let mut payload = actix_web::dev::Payload::from(inner_payload);
        let req = TestRequest::default().to_http_request();

        // multipart should generate an error
        let mut mp = Multipart::from_request(&req, &mut payload).await.unwrap();
        assert!(mp.next().await.unwrap().is_err());

        // and should not consume the payload
        match payload {
            actix_web::dev::Payload::H1 { .. } => {} //expected
            _ => unreachable!(),
        }
    }

    // form-data fields without a Content-Disposition header must be rejected (RFC 7578 §4.2).
    #[actix_rt::test]
    async fn no_content_disposition_form_data() {
        let bytes = Bytes::from(
            "testasdadsad\r\n\
             --abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
             Content-Type: text/plain; charset=utf-8\r\n\
             Content-Length: 4\r\n\
             \r\n\
             test\r\n\
             --abbc761f78ff4d7cb7573b5a23f96ef0\r\n",
        );
        let mut headers = HeaderMap::new();
        headers.insert(
            header::CONTENT_TYPE,
            header::HeaderValue::from_static(
                "multipart/form-data; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
            ),
        );
        let payload = stream::iter(bytes)
            .map(|byte| Ok(Bytes::copy_from_slice(&[byte])))
            .interleave_pending();

        let mut multipart = Multipart::new(&headers, payload);
        let res = multipart.next().await.unwrap();
        assert_matches!(
            res.expect_err(
                "according to RFC 7578, form-data fields require a content-disposition header"
            ),
            Error::ContentDispositionMissing
        );
    }

    // Non-form-data multipart (multipart/mixed) may omit Content-Disposition.
    #[actix_rt::test]
    async fn no_content_disposition_non_form_data() {
        let bytes = Bytes::from(
            "testasdadsad\r\n\
             --abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
             Content-Type: text/plain; charset=utf-8\r\n\
             Content-Length: 4\r\n\
             \r\n\
             test\r\n\
             --abbc761f78ff4d7cb7573b5a23f96ef0\r\n",
        );
        let mut headers = HeaderMap::new();
        headers.insert(
            header::CONTENT_TYPE,
            header::HeaderValue::from_static(
                "multipart/mixed; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
            ),
        );
        let payload = stream::iter(bytes)
            .map(|byte| Ok(Bytes::copy_from_slice(&[byte])))
            .interleave_pending();

        let mut multipart = Multipart::new(&headers, payload);
        let res = multipart.next().await.unwrap();
        res.unwrap();
    }

    // form-data Content-Disposition without a name parameter must be rejected (RFC 7578 §4.2).
    #[actix_rt::test]
    async fn no_name_in_form_data_content_disposition() {
        let bytes = Bytes::from(
            "testasdadsad\r\n\
             --abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
             Content-Disposition: form-data; filename=\"fn.txt\"\r\n\
             Content-Type: text/plain; charset=utf-8\r\n\
             Content-Length: 4\r\n\
             \r\n\
             test\r\n\
             --abbc761f78ff4d7cb7573b5a23f96ef0\r\n",
        );
        let mut headers = HeaderMap::new();
        headers.insert(
            header::CONTENT_TYPE,
            header::HeaderValue::from_static(
                "multipart/form-data; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
            ),
        );
        let payload = stream::iter(bytes)
            .map(|byte| Ok(Bytes::copy_from_slice(&[byte])))
            .interleave_pending();

        let mut multipart = Multipart::new(&headers, payload);
        let res = multipart.next().await.unwrap();
        assert_matches!(
            res.expect_err("according to RFC 7578, form-data fields require a name attribute"),
            Error::ContentDispositionNameMissing
        );
    }

    // A Field must fail fast (NotConsumed) rather than hang when its Multipart is dropped.
    #[actix_rt::test]
    async fn test_drop_multipart_dont_hang() {
        let (sender, payload) = create_stream();
        let (bytes, headers) = create_simple_request_with_header();
        sender.send(Ok(bytes)).unwrap();
        drop(sender); // eof

        let mut multipart = Multipart::new(&headers, payload);
        let mut field = multipart.next().await.unwrap().unwrap();

        drop(multipart);

        // should fail immediately
        match field.next().await {
            Some(Err(Error::NotConsumed)) => {}
            _ => panic!(),
        };
    }

    // Dropping a consumed field must wake the parent Multipart task so it can proceed.
    #[actix_rt::test]
    async fn test_drop_field_awaken_multipart() {
        let (sender, payload) = create_stream();
        let (bytes, headers) = create_double_request_with_header();
        sender.send(Ok(bytes)).unwrap();
        drop(sender); // eof

        let mut multipart = Multipart::new(&headers, payload);
        let mut field = multipart.next().await.unwrap().unwrap();

        let task = rt::spawn(async move {
            rt::time::sleep(Duration::from_millis(500)).await;
            assert_eq!(field.next().await.unwrap().unwrap(), "test");
            drop(field);
        });

        // dropping field should awaken current task
        let _ = multipart.next().await.unwrap().unwrap();
        task.await.unwrap();
    }
}
|
255
actix-multipart/src/payload.rs
Normal file
255
actix-multipart/src/payload.rs
Normal file
|
@ -0,0 +1,255 @@
|
||||||
|
use std::{
|
||||||
|
cell::{RefCell, RefMut},
|
||||||
|
cmp, mem,
|
||||||
|
pin::Pin,
|
||||||
|
rc::Rc,
|
||||||
|
task::{Context, Poll},
|
||||||
|
};
|
||||||
|
|
||||||
|
use actix_web::{
|
||||||
|
error::PayloadError,
|
||||||
|
web::{Bytes, BytesMut},
|
||||||
|
};
|
||||||
|
use futures_core::stream::{LocalBoxStream, Stream};
|
||||||
|
|
||||||
|
use crate::{error::Error, safety::Safety};
|
||||||
|
|
||||||
|
/// Shared, reference-counted handle to the request's payload buffer.
pub(crate) struct PayloadRef {
    payload: Rc<RefCell<PayloadBuffer>>,
}
|
||||||
|
|
||||||
|
impl PayloadRef {
|
||||||
|
pub(crate) fn new(payload: PayloadBuffer) -> PayloadRef {
|
||||||
|
PayloadRef {
|
||||||
|
payload: Rc::new(RefCell::new(payload)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn get_mut(&self, safety: &Safety) -> Option<RefMut<'_, PayloadBuffer>> {
|
||||||
|
if safety.current() {
|
||||||
|
Some(self.payload.borrow_mut())
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Clone for PayloadRef {
|
||||||
|
fn clone(&self) -> PayloadRef {
|
||||||
|
PayloadRef {
|
||||||
|
payload: Rc::clone(&self.payload),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Payload buffer.
pub(crate) struct PayloadBuffer {
    /// Boxed source stream the buffer is filled from.
    pub(crate) stream: LocalBoxStream<'static, Result<Bytes, PayloadError>>,
    /// Bytes received but not yet consumed by the parser.
    pub(crate) buf: BytesMut,
    /// EOF flag. If true, no more payload reads will be attempted.
    pub(crate) eof: bool,
}
|
||||||
|
|
||||||
|
impl PayloadBuffer {
|
||||||
|
    /// Constructs new payload buffer.
    pub(crate) fn new<S>(stream: S) -> Self
    where
        S: Stream<Item = Result<Bytes, PayloadError>> + 'static,
    {
        PayloadBuffer {
            stream: Box::pin(stream),
            buf: BytesMut::with_capacity(1_024), // pre-allocate 1KiB
            eof: false,
        }
    }
|
||||||
|
|
||||||
|
    /// Drains all currently-available chunks from the stream into the buffer.
    ///
    /// Sets `self.eof` when the stream ends. Returns `Ok(())` on both `Pending` and stream end;
    /// propagates the stream's `PayloadError` otherwise.
    pub(crate) fn poll_stream(&mut self, cx: &mut Context<'_>) -> Result<(), PayloadError> {
        loop {
            match Pin::new(&mut self.stream).poll_next(cx) {
                Poll::Ready(Some(Ok(data))) => {
                    self.buf.extend_from_slice(&data);
                    // try to read more data
                    continue;
                }
                Poll::Ready(Some(Err(err))) => return Err(err),
                Poll::Ready(None) => {
                    self.eof = true;
                    return Ok(());
                }
                Poll::Pending => return Ok(()),
            }
        }
    }
|
||||||
|
|
||||||
|
/// Reads exact number of bytes.
|
||||||
|
#[cfg(test)]
|
||||||
|
pub(crate) fn read_exact(&mut self, size: usize) -> Option<Bytes> {
|
||||||
|
if size <= self.buf.len() {
|
||||||
|
Some(self.buf.split_to(size).freeze())
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn read_max(&mut self, size: u64) -> Result<Option<Bytes>, Error> {
|
||||||
|
if !self.buf.is_empty() {
|
||||||
|
let size = cmp::min(self.buf.len() as u64, size) as usize;
|
||||||
|
Ok(Some(self.buf.split_to(size).freeze()))
|
||||||
|
} else if self.eof {
|
||||||
|
Err(Error::Incomplete)
|
||||||
|
} else {
|
||||||
|
Ok(None)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Reads until specified ending.
|
||||||
|
///
|
||||||
|
/// Returns:
|
||||||
|
///
|
||||||
|
/// - `Ok(Some(chunk))` - `needle` is found, with chunk ending after needle
|
||||||
|
/// - `Err(Incomplete)` - `needle` is not found and we're at EOF
|
||||||
|
/// - `Ok(None)` - `needle` is not found otherwise
|
||||||
|
pub(crate) fn read_until(&mut self, needle: &[u8]) -> Result<Option<Bytes>, Error> {
|
||||||
|
match memchr::memmem::find(&self.buf, needle) {
|
||||||
|
// buffer exhausted and EOF without finding needle
|
||||||
|
None if self.eof => Err(Error::Incomplete),
|
||||||
|
|
||||||
|
// needle not yet found
|
||||||
|
None => Ok(None),
|
||||||
|
|
||||||
|
// needle found, split chunk out of buf
|
||||||
|
Some(idx) => Ok(Some(self.buf.split_to(idx + needle.len()).freeze())),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Reads bytes until new line delimiter (`\n`, `0x0A`).
|
||||||
|
///
|
||||||
|
/// Returns:
|
||||||
|
///
|
||||||
|
/// - `Ok(Some(chunk))` - `needle` is found, with chunk ending after needle
|
||||||
|
/// - `Err(Incomplete)` - `needle` is not found and we're at EOF
|
||||||
|
/// - `Ok(None)` - `needle` is not found otherwise
|
||||||
|
#[inline]
|
||||||
|
pub(crate) fn readline(&mut self) -> Result<Option<Bytes>, Error> {
|
||||||
|
self.read_until(b"\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Reads bytes until new line delimiter or until EOF.
|
||||||
|
#[inline]
|
||||||
|
pub(crate) fn readline_or_eof(&mut self) -> Result<Option<Bytes>, Error> {
|
||||||
|
match self.readline() {
|
||||||
|
Err(Error::Incomplete) if self.eof => Ok(Some(self.buf.split().freeze())),
|
||||||
|
line => line,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Puts unprocessed data back to the buffer.
|
||||||
|
pub(crate) fn unprocessed(&mut self, data: Bytes) {
|
||||||
|
// TODO: use BytesMut::from when it's released, see https://github.com/tokio-rs/bytes/pull/710
|
||||||
|
let buf = BytesMut::from(&data[..]);
|
||||||
|
let buf = mem::replace(&mut self.buf, buf);
|
||||||
|
self.buf.extend_from_slice(&buf);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use actix_http::h1;
|
||||||
|
use futures_util::future::lazy;
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn basic() {
|
||||||
|
let (_, payload) = h1::Payload::create(false);
|
||||||
|
let mut payload = PayloadBuffer::new(payload);
|
||||||
|
|
||||||
|
assert_eq!(payload.buf.len(), 0);
|
||||||
|
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
|
||||||
|
assert_eq!(None, payload.read_max(1).unwrap());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn eof() {
|
||||||
|
let (mut sender, payload) = h1::Payload::create(false);
|
||||||
|
let mut payload = PayloadBuffer::new(payload);
|
||||||
|
|
||||||
|
assert_eq!(None, payload.read_max(4).unwrap());
|
||||||
|
sender.feed_data(Bytes::from("data"));
|
||||||
|
sender.feed_eof();
|
||||||
|
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
|
||||||
|
|
||||||
|
assert_eq!(Some(Bytes::from("data")), payload.read_max(4).unwrap());
|
||||||
|
assert_eq!(payload.buf.len(), 0);
|
||||||
|
assert!(payload.read_max(1).is_err());
|
||||||
|
assert!(payload.eof);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn err() {
|
||||||
|
let (mut sender, payload) = h1::Payload::create(false);
|
||||||
|
let mut payload = PayloadBuffer::new(payload);
|
||||||
|
assert_eq!(None, payload.read_max(1).unwrap());
|
||||||
|
sender.set_error(PayloadError::Incomplete(None));
|
||||||
|
lazy(|cx| payload.poll_stream(cx)).await.err().unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn read_max() {
|
||||||
|
let (mut sender, payload) = h1::Payload::create(false);
|
||||||
|
let mut payload = PayloadBuffer::new(payload);
|
||||||
|
|
||||||
|
sender.feed_data(Bytes::from("line1"));
|
||||||
|
sender.feed_data(Bytes::from("line2"));
|
||||||
|
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
|
||||||
|
assert_eq!(payload.buf.len(), 10);
|
||||||
|
|
||||||
|
assert_eq!(Some(Bytes::from("line1")), payload.read_max(5).unwrap());
|
||||||
|
assert_eq!(payload.buf.len(), 5);
|
||||||
|
|
||||||
|
assert_eq!(Some(Bytes::from("line2")), payload.read_max(5).unwrap());
|
||||||
|
assert_eq!(payload.buf.len(), 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn read_exactly() {
|
||||||
|
let (mut sender, payload) = h1::Payload::create(false);
|
||||||
|
let mut payload = PayloadBuffer::new(payload);
|
||||||
|
|
||||||
|
assert_eq!(None, payload.read_exact(2));
|
||||||
|
|
||||||
|
sender.feed_data(Bytes::from("line1"));
|
||||||
|
sender.feed_data(Bytes::from("line2"));
|
||||||
|
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
|
||||||
|
|
||||||
|
assert_eq!(Some(Bytes::from_static(b"li")), payload.read_exact(2));
|
||||||
|
assert_eq!(payload.buf.len(), 8);
|
||||||
|
|
||||||
|
assert_eq!(Some(Bytes::from_static(b"ne1l")), payload.read_exact(4));
|
||||||
|
assert_eq!(payload.buf.len(), 4);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn read_until() {
|
||||||
|
let (mut sender, payload) = h1::Payload::create(false);
|
||||||
|
let mut payload = PayloadBuffer::new(payload);
|
||||||
|
|
||||||
|
assert_eq!(None, payload.read_until(b"ne").unwrap());
|
||||||
|
|
||||||
|
sender.feed_data(Bytes::from("line1"));
|
||||||
|
sender.feed_data(Bytes::from("line2"));
|
||||||
|
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
Some(Bytes::from("line")),
|
||||||
|
payload.read_until(b"ne").unwrap()
|
||||||
|
);
|
||||||
|
assert_eq!(payload.buf.len(), 6);
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
Some(Bytes::from("1line2")),
|
||||||
|
payload.read_until(b"2").unwrap()
|
||||||
|
);
|
||||||
|
assert_eq!(payload.buf.len(), 0);
|
||||||
|
}
|
||||||
|
}
|
60
actix-multipart/src/safety.rs
Normal file
60
actix-multipart/src/safety.rs
Normal file
|
@ -0,0 +1,60 @@
|
||||||
|
use std::{cell::Cell, marker::PhantomData, rc::Rc, task};
|
||||||
|
|
||||||
|
use local_waker::LocalWaker;
|
||||||
|
|
||||||
|
/// Counter. It tracks of number of clones of payloads and give access to payload only to top most.
|
||||||
|
///
|
||||||
|
/// - When dropped, parent task is awakened. This is to support the case where `Field` is dropped in
|
||||||
|
/// a separate task than `Multipart`.
|
||||||
|
/// - Assumes that parent owners don't move to different tasks; only the top-most is allowed to.
|
||||||
|
/// - If dropped and is not top most owner, is_clean flag is set to false.
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub(crate) struct Safety {
|
||||||
|
task: LocalWaker,
|
||||||
|
level: usize,
|
||||||
|
payload: Rc<PhantomData<bool>>,
|
||||||
|
clean: Rc<Cell<bool>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Safety {
|
||||||
|
pub(crate) fn new() -> Safety {
|
||||||
|
let payload = Rc::new(PhantomData);
|
||||||
|
Safety {
|
||||||
|
task: LocalWaker::new(),
|
||||||
|
level: Rc::strong_count(&payload),
|
||||||
|
clean: Rc::new(Cell::new(true)),
|
||||||
|
payload,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn current(&self) -> bool {
|
||||||
|
Rc::strong_count(&self.payload) == self.level && self.clean.get()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn is_clean(&self) -> bool {
|
||||||
|
self.clean.get()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn clone(&self, cx: &task::Context<'_>) -> Safety {
|
||||||
|
let payload = Rc::clone(&self.payload);
|
||||||
|
let s = Safety {
|
||||||
|
task: LocalWaker::new(),
|
||||||
|
level: Rc::strong_count(&payload),
|
||||||
|
clean: self.clean.clone(),
|
||||||
|
payload,
|
||||||
|
};
|
||||||
|
s.task.register(cx.waker());
|
||||||
|
s
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Drop for Safety {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
if Rc::strong_count(&self.payload) != self.level {
|
||||||
|
// Multipart dropped leaving a Field
|
||||||
|
self.clean.set(false);
|
||||||
|
}
|
||||||
|
|
||||||
|
self.task.wake();
|
||||||
|
}
|
||||||
|
}
|
File diff suppressed because it is too large
Load diff
|
@ -1,5 +1,9 @@
|
||||||
use actix_web::http::header::{self, HeaderMap};
|
//! Multipart testing utilities.
|
||||||
use bytes::{BufMut as _, Bytes, BytesMut};
|
|
||||||
|
use actix_web::{
|
||||||
|
http::header::{self, HeaderMap},
|
||||||
|
web::{BufMut as _, Bytes, BytesMut},
|
||||||
|
};
|
||||||
use mime::Mime;
|
use mime::Mime;
|
||||||
use rand::{
|
use rand::{
|
||||||
distributions::{Alphanumeric, DistString as _},
|
distributions::{Alphanumeric, DistString as _},
|
||||||
|
@ -21,8 +25,7 @@ const BOUNDARY_PREFIX: &str = "------------------------";
|
||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// use actix_multipart::test::create_form_data_payload_and_headers;
|
/// use actix_multipart::test::create_form_data_payload_and_headers;
|
||||||
/// use actix_web::test::TestRequest;
|
/// use actix_web::{test::TestRequest, web::Bytes};
|
||||||
/// use bytes::Bytes;
|
|
||||||
/// use memchr::memmem::find;
|
/// use memchr::memmem::find;
|
||||||
///
|
///
|
||||||
/// let (body, headers) = create_form_data_payload_and_headers(
|
/// let (body, headers) = create_form_data_payload_and_headers(
|
||||||
|
|
|
@ -12,9 +12,11 @@ repository = "https://github.com/actix/actix-web"
|
||||||
license = "MIT OR Apache-2.0"
|
license = "MIT OR Apache-2.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
|
|
||||||
[lib]
|
[package.metadata.cargo_check_external_types]
|
||||||
name = "actix_router"
|
allowed_external_types = [
|
||||||
path = "src/lib.rs"
|
"http::*",
|
||||||
|
"serde::*",
|
||||||
|
]
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
default = ["http", "unicode"]
|
default = ["http", "unicode"]
|
||||||
|
@ -36,6 +38,9 @@ http = "0.2.7"
|
||||||
serde = { version = "1", features = ["derive"] }
|
serde = { version = "1", features = ["derive"] }
|
||||||
percent-encoding = "2.1"
|
percent-encoding = "2.1"
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
||||||
[[bench]]
|
[[bench]]
|
||||||
name = "router"
|
name = "router"
|
||||||
harness = false
|
harness = false
|
||||||
|
|
|
@ -511,11 +511,6 @@ mod tests {
|
||||||
value: String,
|
value: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Deserialize)]
|
|
||||||
struct Id {
|
|
||||||
_id: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Deserialize)]
|
#[derive(Debug, Deserialize)]
|
||||||
struct Test1(String, u32);
|
struct Test1(String, u32);
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,5 @@
|
||||||
//! Resource path matching and router.
|
//! Resource path matching and router.
|
||||||
|
|
||||||
#![deny(rust_2018_idioms, nonstandard_style)]
|
|
||||||
#![warn(future_incompatible)]
|
|
||||||
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
||||||
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
||||||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||||
|
|
|
@ -143,9 +143,9 @@ impl<T: ResourcePath> Path<T> {
|
||||||
for (seg_name, val) in self.segments.iter() {
|
for (seg_name, val) in self.segments.iter() {
|
||||||
if name == seg_name {
|
if name == seg_name {
|
||||||
return match val {
|
return match val {
|
||||||
PathItem::Static(ref s) => Some(s),
|
PathItem::Static(ref seg) => Some(seg),
|
||||||
PathItem::Segment(s, e) => {
|
PathItem::Segment(start, end) => {
|
||||||
Some(&self.path.path()[(*s as usize)..(*e as usize)])
|
Some(&self.path.path()[(*start as usize)..(*end as usize)])
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -193,8 +193,10 @@ impl<'a, T: ResourcePath> Iterator for PathIter<'a, T> {
|
||||||
if self.idx < self.params.segment_count() {
|
if self.idx < self.params.segment_count() {
|
||||||
let idx = self.idx;
|
let idx = self.idx;
|
||||||
let res = match self.params.segments[idx].1 {
|
let res = match self.params.segments[idx].1 {
|
||||||
PathItem::Static(ref s) => s,
|
PathItem::Static(ref seg) => seg,
|
||||||
PathItem::Segment(s, e) => &self.params.path.path()[(s as usize)..(e as usize)],
|
PathItem::Segment(start, end) => {
|
||||||
|
&self.params.path.path()[(start as usize)..(end as usize)]
|
||||||
|
}
|
||||||
};
|
};
|
||||||
self.idx += 1;
|
self.idx += 1;
|
||||||
return Some((&self.params.segments[idx].0, res));
|
return Some((&self.params.segments[idx].0, res));
|
||||||
|
@ -217,8 +219,8 @@ impl<T: ResourcePath> Index<usize> for Path<T> {
|
||||||
|
|
||||||
fn index(&self, idx: usize) -> &str {
|
fn index(&self, idx: usize) -> &str {
|
||||||
match self.segments[idx].1 {
|
match self.segments[idx].1 {
|
||||||
PathItem::Static(ref s) => s,
|
PathItem::Static(ref seg) => seg,
|
||||||
PathItem::Segment(s, e) => &self.path.path()[(s as usize)..(e as usize)],
|
PathItem::Segment(start, end) => &self.path.path()[(start as usize)..(end as usize)],
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -2,9 +2,16 @@
|
||||||
|
|
||||||
## Unreleased
|
## Unreleased
|
||||||
|
|
||||||
|
## 0.1.5
|
||||||
|
|
||||||
|
- Add `TestServerConfig::listen_address()` method.
|
||||||
|
|
||||||
|
## 0.1.4
|
||||||
|
|
||||||
- Add `TestServerConfig::rustls_0_23()` method for Rustls v0.23 support behind new `rustls-0_23` crate feature.
|
- Add `TestServerConfig::rustls_0_23()` method for Rustls v0.23 support behind new `rustls-0_23` crate feature.
|
||||||
- Minimum supported Rust version (MSRV) is now 1.72.
|
- Add `TestServerConfig::disable_redirects()` method.
|
||||||
- Various types from `awc`, such as `ClientRequest` and `ClientResponse`, are now re-exported.
|
- Various types from `awc`, such as `ClientRequest` and `ClientResponse`, are now re-exported.
|
||||||
|
- Minimum supported Rust version (MSRV) is now 1.72.
|
||||||
|
|
||||||
## 0.1.3
|
## 0.1.3
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
[package]
|
[package]
|
||||||
name = "actix-test"
|
name = "actix-test"
|
||||||
version = "0.1.3"
|
version = "0.1.5"
|
||||||
authors = [
|
authors = [
|
||||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||||
"Rob Ede <robjtede@icloud.com>",
|
"Rob Ede <robjtede@icloud.com>",
|
||||||
|
@ -18,6 +18,22 @@ categories = [
|
||||||
license = "MIT OR Apache-2.0"
|
license = "MIT OR Apache-2.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
|
|
||||||
|
[package.metadata.cargo_check_external_types]
|
||||||
|
allowed_external_types = [
|
||||||
|
"actix_codec::*",
|
||||||
|
"actix_http_test::*",
|
||||||
|
"actix_http::*",
|
||||||
|
"actix_service::*",
|
||||||
|
"actix_web::*",
|
||||||
|
"awc::*",
|
||||||
|
"bytes::*",
|
||||||
|
"futures_core::*",
|
||||||
|
"http::*",
|
||||||
|
"openssl::*",
|
||||||
|
"rustls::*",
|
||||||
|
"tokio::*",
|
||||||
|
]
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
default = []
|
default = []
|
||||||
|
|
||||||
|
@ -57,3 +73,6 @@ tls-rustls-0_21 = { package = "rustls", version = "0.21", optional = true }
|
||||||
tls-rustls-0_22 = { package = "rustls", version = "0.22", optional = true }
|
tls-rustls-0_22 = { package = "rustls", version = "0.22", optional = true }
|
||||||
tls-rustls-0_23 = { package = "rustls", version = "0.23", default-features = false, optional = true }
|
tls-rustls-0_23 = { package = "rustls", version = "0.23", default-features = false, optional = true }
|
||||||
tokio = { version = "1.24.2", features = ["sync"] }
|
tokio = { version = "1.24.2", features = ["sync"] }
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
45
actix-test/README.md
Normal file
45
actix-test/README.md
Normal file
|
@ -0,0 +1,45 @@
|
||||||
|
# `actix-test`
|
||||||
|
|
||||||
|
<!-- prettier-ignore-start -->
|
||||||
|
|
||||||
|
[![crates.io](https://img.shields.io/crates/v/actix-test?label=latest)](https://crates.io/crates/actix-test)
|
||||||
|
[![Documentation](https://docs.rs/actix-test/badge.svg?version=0.1.5)](https://docs.rs/actix-test/0.1.5)
|
||||||
|
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
|
||||||
|
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-test.svg)
|
||||||
|
<br />
|
||||||
|
[![dependency status](https://deps.rs/crate/actix-test/0.1.5/status.svg)](https://deps.rs/crate/actix-test/0.1.5)
|
||||||
|
[![Download](https://img.shields.io/crates/d/actix-test.svg)](https://crates.io/crates/actix-test)
|
||||||
|
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
|
||||||
|
|
||||||
|
<!-- prettier-ignore-end -->
|
||||||
|
|
||||||
|
<!-- cargo-rdme start -->
|
||||||
|
|
||||||
|
Integration testing tools for Actix Web applications.
|
||||||
|
|
||||||
|
The main integration testing tool is [`TestServer`]. It spawns a real HTTP server on an unused port and provides methods that use a real HTTP client. Therefore, it is much closer to real-world cases than using `init_service`, which skips HTTP encoding and decoding.
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use actix_web::{get, web, test, App, HttpResponse, Error, Responder};
|
||||||
|
|
||||||
|
#[get("/")]
|
||||||
|
async fn my_handler() -> Result<impl Responder, Error> {
|
||||||
|
Ok(HttpResponse::Ok())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn test_example() {
|
||||||
|
let srv = actix_test::start(||
|
||||||
|
App::new().service(my_handler)
|
||||||
|
);
|
||||||
|
|
||||||
|
let req = srv.get("/");
|
||||||
|
let res = req.send().await.unwrap();
|
||||||
|
|
||||||
|
assert!(res.status().is_success());
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- cargo-rdme end -->
|
|
@ -5,6 +5,7 @@
|
||||||
//! real-world cases than using `init_service`, which skips HTTP encoding and decoding.
|
//! real-world cases than using `init_service`, which skips HTTP encoding and decoding.
|
||||||
//!
|
//!
|
||||||
//! # Examples
|
//! # Examples
|
||||||
|
//!
|
||||||
//! ```
|
//! ```
|
||||||
//! use actix_web::{get, web, test, App, HttpResponse, Error, Responder};
|
//! use actix_web::{get, web, test, App, HttpResponse, Error, Responder};
|
||||||
//!
|
//!
|
||||||
|
@ -26,8 +27,6 @@
|
||||||
//! }
|
//! }
|
||||||
//! ```
|
//! ```
|
||||||
|
|
||||||
#![deny(rust_2018_idioms, nonstandard_style)]
|
|
||||||
#![warn(future_incompatible)]
|
|
||||||
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
||||||
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
||||||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||||
|
@ -149,10 +148,12 @@ where
|
||||||
StreamType::Rustls023(_) => true,
|
StreamType::Rustls023(_) => true,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
let client_cfg = cfg.clone();
|
||||||
|
|
||||||
// run server in separate orphaned thread
|
// run server in separate orphaned thread
|
||||||
thread::spawn(move || {
|
thread::spawn(move || {
|
||||||
rt::System::new().block_on(async move {
|
rt::System::new().block_on(async move {
|
||||||
let tcp = net::TcpListener::bind(("127.0.0.1", cfg.port)).unwrap();
|
let tcp = net::TcpListener::bind((cfg.listen_address.clone(), cfg.port)).unwrap();
|
||||||
let local_addr = tcp.local_addr().unwrap();
|
let local_addr = tcp.local_addr().unwrap();
|
||||||
let factory = factory.clone();
|
let factory = factory.clone();
|
||||||
let srv_cfg = cfg.clone();
|
let srv_cfg = cfg.clone();
|
||||||
|
@ -460,7 +461,13 @@ where
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
Client::builder().connector(connector).finish()
|
let mut client_builder = Client::builder().connector(connector);
|
||||||
|
|
||||||
|
if client_cfg.disable_redirects {
|
||||||
|
client_builder = client_builder.disable_redirects();
|
||||||
|
}
|
||||||
|
|
||||||
|
client_builder.finish()
|
||||||
};
|
};
|
||||||
|
|
||||||
TestServer {
|
TestServer {
|
||||||
|
@ -480,6 +487,7 @@ enum HttpVer {
|
||||||
Both,
|
Both,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::large_enum_variant)]
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
enum StreamType {
|
enum StreamType {
|
||||||
Tcp,
|
Tcp,
|
||||||
|
@ -505,8 +513,10 @@ pub struct TestServerConfig {
|
||||||
tp: HttpVer,
|
tp: HttpVer,
|
||||||
stream: StreamType,
|
stream: StreamType,
|
||||||
client_request_timeout: Duration,
|
client_request_timeout: Duration,
|
||||||
|
listen_address: String,
|
||||||
port: u16,
|
port: u16,
|
||||||
workers: usize,
|
workers: usize,
|
||||||
|
disable_redirects: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Default for TestServerConfig {
|
impl Default for TestServerConfig {
|
||||||
|
@ -522,8 +532,10 @@ impl TestServerConfig {
|
||||||
tp: HttpVer::Both,
|
tp: HttpVer::Both,
|
||||||
stream: StreamType::Tcp,
|
stream: StreamType::Tcp,
|
||||||
client_request_timeout: Duration::from_secs(5),
|
client_request_timeout: Duration::from_secs(5),
|
||||||
|
listen_address: "127.0.0.1".to_string(),
|
||||||
port: 0,
|
port: 0,
|
||||||
workers: 1,
|
workers: 1,
|
||||||
|
disable_redirects: false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -596,6 +608,14 @@ impl TestServerConfig {
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Sets the address the server will listen on.
|
||||||
|
///
|
||||||
|
/// By default, only listens on `127.0.0.1`.
|
||||||
|
pub fn listen_address(mut self, addr: impl Into<String>) -> Self {
|
||||||
|
self.listen_address = addr.into();
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
/// Sets test server port.
|
/// Sets test server port.
|
||||||
///
|
///
|
||||||
/// By default, a random free port is determined by the OS.
|
/// By default, a random free port is determined by the OS.
|
||||||
|
@ -611,6 +631,15 @@ impl TestServerConfig {
|
||||||
self.workers = workers;
|
self.workers = workers;
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Instruct the client to not follow redirects.
|
||||||
|
///
|
||||||
|
/// By default, the client will follow up to 10 consecutive redirects
|
||||||
|
/// before giving up.
|
||||||
|
pub fn disable_redirects(mut self) -> Self {
|
||||||
|
self.disable_redirects = true;
|
||||||
|
self
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A basic HTTP server controller that simplifies the process of writing integration tests for
|
/// A basic HTTP server controller that simplifies the process of writing integration tests for
|
||||||
|
@ -637,9 +666,9 @@ impl TestServer {
|
||||||
let scheme = if self.tls { "https" } else { "http" };
|
let scheme = if self.tls { "https" } else { "http" };
|
||||||
|
|
||||||
if uri.starts_with('/') {
|
if uri.starts_with('/') {
|
||||||
format!("{}://localhost:{}{}", scheme, self.addr.port(), uri)
|
format!("{}://{}{}", scheme, self.addr, uri)
|
||||||
} else {
|
} else {
|
||||||
format!("{}://localhost:{}/{}", scheme, self.addr.port(), uri)
|
format!("{}://{}/{}", scheme, self.addr, uri)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -2,6 +2,10 @@
|
||||||
|
|
||||||
## Unreleased
|
## Unreleased
|
||||||
|
|
||||||
|
## 4.3.1 <!-- v4.3.1+deprecated -->
|
||||||
|
|
||||||
|
- Reduce memory usage by `take`-ing (rather than `split`-ing) the encoded buffer when yielding bytes in the response stream.
|
||||||
|
- Mark crate as deprecated.
|
||||||
- Minimum supported Rust version (MSRV) is now 1.72.
|
- Minimum supported Rust version (MSRV) is now 1.72.
|
||||||
|
|
||||||
## 4.3.0
|
## 4.3.0
|
||||||
|
|
|
@ -1,17 +1,24 @@
|
||||||
[package]
|
[package]
|
||||||
name = "actix-web-actors"
|
name = "actix-web-actors"
|
||||||
version = "4.3.0"
|
version = "4.3.1+deprecated"
|
||||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||||
description = "Actix actors support for Actix Web"
|
description = "Actix actors support for Actix Web"
|
||||||
keywords = ["actix", "http", "web", "framework", "async"]
|
keywords = ["actix", "http", "web", "framework", "async"]
|
||||||
homepage = "https://actix.rs"
|
homepage.workspace = true
|
||||||
repository = "https://github.com/actix/actix-web"
|
repository.workspace = true
|
||||||
license = "MIT OR Apache-2.0"
|
license.workspace = true
|
||||||
edition = "2021"
|
edition.workspace = true
|
||||||
|
rust-version.workspace = true
|
||||||
|
|
||||||
[lib]
|
[package.metadata.cargo_check_external_types]
|
||||||
name = "actix_web_actors"
|
allowed_external_types = [
|
||||||
path = "src/lib.rs"
|
"actix::*",
|
||||||
|
"actix_http::*",
|
||||||
|
"actix_web::*",
|
||||||
|
"bytes::*",
|
||||||
|
"bytestring::*",
|
||||||
|
"futures_core::*",
|
||||||
|
]
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
actix = { version = ">=0.12, <0.14", default-features = false }
|
actix = { version = ">=0.12, <0.14", default-features = false }
|
||||||
|
@ -35,3 +42,6 @@ actix-web = { version = "4", features = ["macros"] }
|
||||||
env_logger = "0.11"
|
env_logger = "0.11"
|
||||||
futures-util = { version = "0.3.17", default-features = false, features = ["std"] }
|
futures-util = { version = "0.3.17", default-features = false, features = ["std"] }
|
||||||
mime = "0.3"
|
mime = "0.3"
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
|
@ -1,15 +1,17 @@
|
||||||
# `actix-web-actors`
|
# `actix-web-actors`
|
||||||
|
|
||||||
> Actix actors support for Actix Web.
|
> Actix actors support for Actix Web.
|
||||||
|
>
|
||||||
|
> This crate is deprecated. Migrate to [`actix-ws`](https://crates.io/crates/actix-ws).
|
||||||
|
|
||||||
<!-- prettier-ignore-start -->
|
<!-- prettier-ignore-start -->
|
||||||
|
|
||||||
[![crates.io](https://img.shields.io/crates/v/actix-web-actors?label=latest)](https://crates.io/crates/actix-web-actors)
|
[![crates.io](https://img.shields.io/crates/v/actix-web-actors?label=latest)](https://crates.io/crates/actix-web-actors)
|
||||||
[![Documentation](https://docs.rs/actix-web-actors/badge.svg?version=4.3.0)](https://docs.rs/actix-web-actors/4.3.0)
|
[![Documentation](https://docs.rs/actix-web-actors/badge.svg?version=4.3.1)](https://docs.rs/actix-web-actors/4.3.1)
|
||||||
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
|
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
|
||||||
![License](https://img.shields.io/crates/l/actix-web-actors.svg)
|
![License](https://img.shields.io/crates/l/actix-web-actors.svg)
|
||||||
<br />
|
<br />
|
||||||
[![dependency status](https://deps.rs/crate/actix-web-actors/4.3.0/status.svg)](https://deps.rs/crate/actix-web-actors/4.3.0)
|
![maintenance-status](https://img.shields.io/badge/maintenance-deprecated-red.svg)
|
||||||
[![Download](https://img.shields.io/crates/d/actix-web-actors.svg)](https://crates.io/crates/actix-web-actors)
|
[![Download](https://img.shields.io/crates/d/actix-web-actors.svg)](https://crates.io/crates/actix-web-actors)
|
||||||
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
|
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
|
||||||
|
|
||||||
|
|
|
@ -1,5 +1,7 @@
|
||||||
//! Actix actors support for Actix Web.
|
//! Actix actors support for Actix Web.
|
||||||
//!
|
//!
|
||||||
|
//! This crate is deprecated. Migrate to [`actix-ws`](https://crates.io/crates/actix-ws).
|
||||||
|
//!
|
||||||
//! # Examples
|
//! # Examples
|
||||||
//!
|
//!
|
||||||
//! ```no_run
|
//! ```no_run
|
||||||
|
@ -55,8 +57,6 @@
|
||||||
//! * [`HttpContext`]: This struct provides actor support for streaming HTTP responses.
|
//! * [`HttpContext`]: This struct provides actor support for streaming HTTP responses.
|
||||||
//!
|
//!
|
||||||
|
|
||||||
#![deny(rust_2018_idioms, nonstandard_style)]
|
|
||||||
#![warn(future_incompatible)]
|
|
||||||
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
||||||
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
||||||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||||
|
|
|
@ -710,7 +710,7 @@ where
|
||||||
}
|
}
|
||||||
|
|
||||||
if !this.buf.is_empty() {
|
if !this.buf.is_empty() {
|
||||||
Poll::Ready(Some(Ok(this.buf.split().freeze())))
|
Poll::Ready(Some(Ok(std::mem::take(&mut this.buf).freeze())))
|
||||||
} else if this.fut.alive() && !this.closed {
|
} else if this.fut.alive() && !this.closed {
|
||||||
Poll::Pending
|
Poll::Pending
|
||||||
} else {
|
} else {
|
||||||
|
@ -796,11 +796,8 @@ where
|
||||||
Some(frm) => {
|
Some(frm) => {
|
||||||
let msg = match frm {
|
let msg = match frm {
|
||||||
Frame::Text(data) => {
|
Frame::Text(data) => {
|
||||||
Message::Text(ByteString::try_from(data).map_err(|e| {
|
Message::Text(ByteString::try_from(data).map_err(|err| {
|
||||||
ProtocolError::Io(io::Error::new(
|
ProtocolError::Io(io::Error::new(io::ErrorKind::Other, err))
|
||||||
io::ErrorKind::Other,
|
|
||||||
format!("{}", e),
|
|
||||||
))
|
|
||||||
})?)
|
})?)
|
||||||
}
|
}
|
||||||
Frame::Binary(data) => Message::Binary(data),
|
Frame::Binary(data) => Message::Binary(data),
|
||||||
|
|
|
@ -2,6 +2,11 @@
|
||||||
|
|
||||||
## Unreleased
|
## Unreleased
|
||||||
|
|
||||||
|
## 4.3.0
|
||||||
|
|
||||||
|
- Add `#[scope]` macro.
|
||||||
|
- Add `compat-routing-macros-force-pub` crate feature which, on-by-default, which when disabled causes handlers to inherit their attached function's visibility.
|
||||||
|
- Prevent inclusion of default `actix-router` features.
|
||||||
- Minimum supported Rust version (MSRV) is now 1.72.
|
- Minimum supported Rust version (MSRV) is now 1.72.
|
||||||
|
|
||||||
## 4.2.2
|
## 4.2.2
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
[package]
|
[package]
|
||||||
name = "actix-web-codegen"
|
name = "actix-web-codegen"
|
||||||
version = "4.2.2"
|
version = "4.3.0"
|
||||||
description = "Routing and runtime macros for Actix Web"
|
description = "Routing and runtime macros for Actix Web"
|
||||||
authors = [
|
authors = [
|
||||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||||
|
@ -15,8 +15,12 @@ rust-version.workspace = true
|
||||||
[lib]
|
[lib]
|
||||||
proc-macro = true
|
proc-macro = true
|
||||||
|
|
||||||
|
[features]
|
||||||
|
default = ["compat-routing-macros-force-pub"]
|
||||||
|
compat-routing-macros-force-pub = []
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
actix-router = "0.5"
|
actix-router = { version = "0.5", default-features = false }
|
||||||
proc-macro2 = "1"
|
proc-macro2 = "1"
|
||||||
quote = "1"
|
quote = "1"
|
||||||
syn = { version = "2", features = ["full", "extra-traits"] }
|
syn = { version = "2", features = ["full", "extra-traits"] }
|
||||||
|
@ -31,3 +35,6 @@ actix-web = "4"
|
||||||
futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
|
futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
|
||||||
trybuild = "1"
|
trybuild = "1"
|
||||||
rustversion = "1"
|
rustversion = "1"
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
|
@ -5,11 +5,11 @@
|
||||||
<!-- prettier-ignore-start -->
|
<!-- prettier-ignore-start -->
|
||||||
|
|
||||||
[![crates.io](https://img.shields.io/crates/v/actix-web-codegen?label=latest)](https://crates.io/crates/actix-web-codegen)
|
[![crates.io](https://img.shields.io/crates/v/actix-web-codegen?label=latest)](https://crates.io/crates/actix-web-codegen)
|
||||||
[![Documentation](https://docs.rs/actix-web-codegen/badge.svg?version=4.2.2)](https://docs.rs/actix-web-codegen/4.2.2)
|
[![Documentation](https://docs.rs/actix-web-codegen/badge.svg?version=4.3.0)](https://docs.rs/actix-web-codegen/4.3.0)
|
||||||
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
|
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
|
||||||
![License](https://img.shields.io/crates/l/actix-web-codegen.svg)
|
![License](https://img.shields.io/crates/l/actix-web-codegen.svg)
|
||||||
<br />
|
<br />
|
||||||
[![dependency status](https://deps.rs/crate/actix-web-codegen/4.2.2/status.svg)](https://deps.rs/crate/actix-web-codegen/4.2.2)
|
[![dependency status](https://deps.rs/crate/actix-web-codegen/4.3.0/status.svg)](https://deps.rs/crate/actix-web-codegen/4.3.0)
|
||||||
[![Download](https://img.shields.io/crates/d/actix-web-codegen.svg)](https://crates.io/crates/actix-web-codegen)
|
[![Download](https://img.shields.io/crates/d/actix-web-codegen.svg)](https://crates.io/crates/actix-web-codegen)
|
||||||
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
|
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
|
||||||
|
|
||||||
|
|
|
@ -73,8 +73,6 @@
|
||||||
//! [DELETE]: macro@delete
|
//! [DELETE]: macro@delete
|
||||||
|
|
||||||
#![recursion_limit = "512"]
|
#![recursion_limit = "512"]
|
||||||
#![deny(rust_2018_idioms, nonstandard_style)]
|
|
||||||
#![warn(future_incompatible)]
|
|
||||||
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
||||||
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
||||||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||||
|
@ -83,6 +81,7 @@ use proc_macro::TokenStream;
|
||||||
use quote::quote;
|
use quote::quote;
|
||||||
|
|
||||||
mod route;
|
mod route;
|
||||||
|
mod scope;
|
||||||
|
|
||||||
/// Creates resource handler, allowing multiple HTTP method guards.
|
/// Creates resource handler, allowing multiple HTTP method guards.
|
||||||
///
|
///
|
||||||
|
@ -197,6 +196,43 @@ method_macro!(Options, options);
|
||||||
method_macro!(Trace, trace);
|
method_macro!(Trace, trace);
|
||||||
method_macro!(Patch, patch);
|
method_macro!(Patch, patch);
|
||||||
|
|
||||||
|
/// Prepends a path prefix to all handlers using routing macros inside the attached module.
|
||||||
|
///
|
||||||
|
/// # Syntax
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// # use actix_web_codegen::scope;
|
||||||
|
/// #[scope("/prefix")]
|
||||||
|
/// mod api {
|
||||||
|
/// // ...
|
||||||
|
/// }
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// # Arguments
|
||||||
|
///
|
||||||
|
/// - `"/prefix"` - Raw literal string to be prefixed onto contained handlers' paths.
|
||||||
|
///
|
||||||
|
/// # Example
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// # use actix_web_codegen::{scope, get};
|
||||||
|
/// # use actix_web::Responder;
|
||||||
|
/// #[scope("/api")]
|
||||||
|
/// mod api {
|
||||||
|
/// # use super::*;
|
||||||
|
/// #[get("/hello")]
|
||||||
|
/// pub async fn hello() -> impl Responder {
|
||||||
|
/// // this has path /api/hello
|
||||||
|
/// "Hello, world!"
|
||||||
|
/// }
|
||||||
|
/// }
|
||||||
|
/// # fn main() {}
|
||||||
|
/// ```
|
||||||
|
#[proc_macro_attribute]
|
||||||
|
pub fn scope(args: TokenStream, input: TokenStream) -> TokenStream {
|
||||||
|
scope::with_scope(args, input)
|
||||||
|
}
|
||||||
|
|
||||||
/// Marks async main function as the Actix Web system entry-point.
|
/// Marks async main function as the Actix Web system entry-point.
|
||||||
///
|
///
|
||||||
/// Note that Actix Web also works under `#[tokio::main]` since version 4.0. However, this macro is
|
/// Note that Actix Web also works under `#[tokio::main]` since version 4.0. However, this macro is
|
||||||
|
@ -240,3 +276,15 @@ pub fn test(_: TokenStream, item: TokenStream) -> TokenStream {
|
||||||
output.extend(item);
|
output.extend(item);
|
||||||
output
|
output
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Converts the error to a token stream and appends it to the original input.
|
||||||
|
///
|
||||||
|
/// Returning the original input in addition to the error is good for IDEs which can gracefully
|
||||||
|
/// recover and show more precise errors within the macro body.
|
||||||
|
///
|
||||||
|
/// See <https://github.com/rust-analyzer/rust-analyzer/issues/10468> for more info.
|
||||||
|
fn input_and_compile_error(mut item: TokenStream, err: syn::Error) -> TokenStream {
|
||||||
|
let compile_err = TokenStream::from(err.to_compile_error());
|
||||||
|
item.extend(compile_err);
|
||||||
|
item
|
||||||
|
}
|
||||||
|
|
|
@ -6,10 +6,12 @@ use proc_macro2::{Span, TokenStream as TokenStream2};
|
||||||
use quote::{quote, ToTokens, TokenStreamExt};
|
use quote::{quote, ToTokens, TokenStreamExt};
|
||||||
use syn::{punctuated::Punctuated, Ident, LitStr, Path, Token};
|
use syn::{punctuated::Punctuated, Ident, LitStr, Path, Token};
|
||||||
|
|
||||||
|
use crate::input_and_compile_error;
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct RouteArgs {
|
pub struct RouteArgs {
|
||||||
path: syn::LitStr,
|
pub(crate) path: syn::LitStr,
|
||||||
options: Punctuated<syn::MetaNameValue, Token![,]>,
|
pub(crate) options: Punctuated<syn::MetaNameValue, Token![,]>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl syn::parse::Parse for RouteArgs {
|
impl syn::parse::Parse for RouteArgs {
|
||||||
|
@ -78,7 +80,7 @@ macro_rules! standard_method_type {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn from_path(method: &Path) -> Result<Self, ()> {
|
pub(crate) fn from_path(method: &Path) -> Result<Self, ()> {
|
||||||
match () {
|
match () {
|
||||||
$(_ if method.is_ident(stringify!($lower)) => Ok(Self::$variant),)+
|
$(_ if method.is_ident(stringify!($lower)) => Ok(Self::$variant),)+
|
||||||
_ => Err(()),
|
_ => Err(()),
|
||||||
|
@ -411,6 +413,13 @@ impl ToTokens for Route {
|
||||||
doc_attributes,
|
doc_attributes,
|
||||||
} = self;
|
} = self;
|
||||||
|
|
||||||
|
#[allow(unused_variables)] // used when force-pub feature is disabled
|
||||||
|
let vis = &ast.vis;
|
||||||
|
|
||||||
|
// TODO(breaking): remove this force-pub forwards-compatibility feature
|
||||||
|
#[cfg(feature = "compat-routing-macros-force-pub")]
|
||||||
|
let vis = syn::Visibility::Public(<Token![pub]>::default());
|
||||||
|
|
||||||
let registrations: TokenStream2 = args
|
let registrations: TokenStream2 = args
|
||||||
.iter()
|
.iter()
|
||||||
.map(|args| {
|
.map(|args| {
|
||||||
|
@ -458,7 +467,7 @@ impl ToTokens for Route {
|
||||||
let stream = quote! {
|
let stream = quote! {
|
||||||
#(#doc_attributes)*
|
#(#doc_attributes)*
|
||||||
#[allow(non_camel_case_types, missing_docs)]
|
#[allow(non_camel_case_types, missing_docs)]
|
||||||
pub struct #name;
|
#vis struct #name;
|
||||||
|
|
||||||
impl ::actix_web::dev::HttpServiceFactory for #name {
|
impl ::actix_web::dev::HttpServiceFactory for #name {
|
||||||
fn register(self, __config: &mut actix_web::dev::AppService) {
|
fn register(self, __config: &mut actix_web::dev::AppService) {
|
||||||
|
@ -542,15 +551,3 @@ pub(crate) fn with_methods(input: TokenStream) -> TokenStream {
|
||||||
Err(err) => input_and_compile_error(input, err),
|
Err(err) => input_and_compile_error(input, err),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Converts the error to a token stream and appends it to the original input.
|
|
||||||
///
|
|
||||||
/// Returning the original input in addition to the error is good for IDEs which can gracefully
|
|
||||||
/// recover and show more precise errors within the macro body.
|
|
||||||
///
|
|
||||||
/// See <https://github.com/rust-analyzer/rust-analyzer/issues/10468> for more info.
|
|
||||||
fn input_and_compile_error(mut item: TokenStream, err: syn::Error) -> TokenStream {
|
|
||||||
let compile_err = TokenStream::from(err.to_compile_error());
|
|
||||||
item.extend(compile_err);
|
|
||||||
item
|
|
||||||
}
|
|
||||||
|
|
103
actix-web-codegen/src/scope.rs
Normal file
103
actix-web-codegen/src/scope.rs
Normal file
|
@ -0,0 +1,103 @@
|
||||||
|
use proc_macro::TokenStream;
|
||||||
|
use proc_macro2::{Span, TokenStream as TokenStream2};
|
||||||
|
use quote::{quote, ToTokens as _};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
input_and_compile_error,
|
||||||
|
route::{MethodType, RouteArgs},
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn with_scope(args: TokenStream, input: TokenStream) -> TokenStream {
|
||||||
|
match with_scope_inner(args, input.clone()) {
|
||||||
|
Ok(stream) => stream,
|
||||||
|
Err(err) => input_and_compile_error(input, err),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn with_scope_inner(args: TokenStream, input: TokenStream) -> syn::Result<TokenStream> {
|
||||||
|
if args.is_empty() {
|
||||||
|
return Err(syn::Error::new(
|
||||||
|
Span::call_site(),
|
||||||
|
"missing arguments for scope macro, expected: #[scope(\"/prefix\")]",
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
let scope_prefix = syn::parse::<syn::LitStr>(args.clone()).map_err(|err| {
|
||||||
|
syn::Error::new(
|
||||||
|
err.span(),
|
||||||
|
"argument to scope macro is not a string literal, expected: #[scope(\"/prefix\")]",
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let scope_prefix_value = scope_prefix.value();
|
||||||
|
|
||||||
|
if scope_prefix_value.ends_with('/') {
|
||||||
|
// trailing slashes cause non-obvious problems
|
||||||
|
// it's better to point them out to developers rather than
|
||||||
|
|
||||||
|
return Err(syn::Error::new(
|
||||||
|
scope_prefix.span(),
|
||||||
|
"scopes should not have trailing slashes; see https://docs.rs/actix-web/4/actix_web/struct.Scope.html#avoid-trailing-slashes",
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut module = syn::parse::<syn::ItemMod>(input).map_err(|err| {
|
||||||
|
syn::Error::new(err.span(), "#[scope] macro must be attached to a module")
|
||||||
|
})?;
|
||||||
|
|
||||||
|
// modify any routing macros (method or route[s]) attached to
|
||||||
|
// functions by prefixing them with this scope macro's argument
|
||||||
|
if let Some((_, items)) = &mut module.content {
|
||||||
|
for item in items {
|
||||||
|
if let syn::Item::Fn(fun) = item {
|
||||||
|
fun.attrs = fun
|
||||||
|
.attrs
|
||||||
|
.iter()
|
||||||
|
.map(|attr| modify_attribute_with_scope(attr, &scope_prefix_value))
|
||||||
|
.collect();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(module.to_token_stream().into())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Checks if the attribute is a method type and has a route path, then modifies it.
|
||||||
|
fn modify_attribute_with_scope(attr: &syn::Attribute, scope_path: &str) -> syn::Attribute {
|
||||||
|
match (attr.parse_args::<RouteArgs>(), attr.clone().meta) {
|
||||||
|
(Ok(route_args), syn::Meta::List(meta_list)) if has_allowed_methods_in_scope(attr) => {
|
||||||
|
let modified_path = format!("{}{}", scope_path, route_args.path.value());
|
||||||
|
|
||||||
|
let options_tokens: Vec<TokenStream2> = route_args
|
||||||
|
.options
|
||||||
|
.iter()
|
||||||
|
.map(|option| {
|
||||||
|
quote! { ,#option }
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let combined_options_tokens: TokenStream2 =
|
||||||
|
options_tokens
|
||||||
|
.into_iter()
|
||||||
|
.fold(TokenStream2::new(), |mut acc, ts| {
|
||||||
|
acc.extend(std::iter::once(ts));
|
||||||
|
acc
|
||||||
|
});
|
||||||
|
|
||||||
|
syn::Attribute {
|
||||||
|
meta: syn::Meta::List(syn::MetaList {
|
||||||
|
tokens: quote! { #modified_path #combined_options_tokens },
|
||||||
|
..meta_list.clone()
|
||||||
|
}),
|
||||||
|
..attr.clone()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => attr.clone(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn has_allowed_methods_in_scope(attr: &syn::Attribute) -> bool {
|
||||||
|
MethodType::from_path(attr.path()).is_ok()
|
||||||
|
|| attr.path().is_ident("route")
|
||||||
|
|| attr.path().is_ident("ROUTE")
|
||||||
|
}
|
|
@ -145,7 +145,7 @@ async fn custom_resource_name_test<'a>(req: HttpRequest) -> impl Responder {
|
||||||
mod guard_module {
|
mod guard_module {
|
||||||
use actix_web::{guard::GuardContext, http::header};
|
use actix_web::{guard::GuardContext, http::header};
|
||||||
|
|
||||||
pub fn guard(ctx: &GuardContext) -> bool {
|
pub fn guard(ctx: &GuardContext<'_>) -> bool {
|
||||||
ctx.header::<header::Accept>()
|
ctx.header::<header::Accept>()
|
||||||
.map(|h| h.preference() == "image/*")
|
.map(|h| h.preference() == "image/*")
|
||||||
.unwrap_or(false)
|
.unwrap_or(false)
|
200
actix-web-codegen/tests/scopes.rs
Normal file
200
actix-web-codegen/tests/scopes.rs
Normal file
|
@ -0,0 +1,200 @@
|
||||||
|
use actix_web::{guard::GuardContext, http, http::header, web, App, HttpResponse, Responder};
|
||||||
|
use actix_web_codegen::{delete, get, post, route, routes, scope};
|
||||||
|
|
||||||
|
pub fn image_guard(ctx: &GuardContext<'_>) -> bool {
|
||||||
|
ctx.header::<header::Accept>()
|
||||||
|
.map(|h| h.preference() == "image/*")
|
||||||
|
.unwrap_or(false)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[scope("/test")]
|
||||||
|
mod scope_module {
|
||||||
|
// ensure that imports can be brought into the scope
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[get("/test/guard", guard = "image_guard")]
|
||||||
|
pub async fn guard() -> impl Responder {
|
||||||
|
HttpResponse::Ok()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[get("/test")]
|
||||||
|
pub async fn test() -> impl Responder {
|
||||||
|
HttpResponse::Ok().finish()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[get("/twice-test/{value}")]
|
||||||
|
pub async fn twice(value: web::Path<String>) -> impl actix_web::Responder {
|
||||||
|
let int_value: i32 = value.parse().unwrap_or(0);
|
||||||
|
let doubled = int_value * 2;
|
||||||
|
HttpResponse::Ok().body(format!("Twice value: {}", doubled))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[post("/test")]
|
||||||
|
pub async fn post() -> impl Responder {
|
||||||
|
HttpResponse::Ok().body("post works")
|
||||||
|
}
|
||||||
|
|
||||||
|
#[delete("/test")]
|
||||||
|
pub async fn delete() -> impl Responder {
|
||||||
|
"delete works"
|
||||||
|
}
|
||||||
|
|
||||||
|
#[route("/test", method = "PUT", method = "PATCH", method = "CUSTOM")]
|
||||||
|
pub async fn multiple_shared_path() -> impl Responder {
|
||||||
|
HttpResponse::Ok().finish()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[routes]
|
||||||
|
#[head("/test1")]
|
||||||
|
#[connect("/test2")]
|
||||||
|
#[options("/test3")]
|
||||||
|
#[trace("/test4")]
|
||||||
|
pub async fn multiple_separate_paths() -> impl Responder {
|
||||||
|
HttpResponse::Ok().finish()
|
||||||
|
}
|
||||||
|
|
||||||
|
// test calling this from other mod scope with scope attribute...
|
||||||
|
pub fn mod_common(message: String) -> impl actix_web::Responder {
|
||||||
|
HttpResponse::Ok().body(message)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Scope doc string to check in cargo expand.
|
||||||
|
#[scope("/v1")]
|
||||||
|
mod mod_scope_v1 {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
/// Route doc string to check in cargo expand.
|
||||||
|
#[get("/test")]
|
||||||
|
pub async fn test() -> impl Responder {
|
||||||
|
scope_module::mod_common("version1 works".to_string())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[scope("/v2")]
|
||||||
|
mod mod_scope_v2 {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
// check to make sure non-function tokens in the scope block are preserved...
|
||||||
|
enum TestEnum {
|
||||||
|
Works,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[get("/test")]
|
||||||
|
pub async fn test() -> impl Responder {
|
||||||
|
// make sure this type still exists...
|
||||||
|
let test_enum = TestEnum::Works;
|
||||||
|
|
||||||
|
match test_enum {
|
||||||
|
TestEnum::Works => scope_module::mod_common("version2 works".to_string()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn scope_get_async() {
|
||||||
|
let srv = actix_test::start(|| App::new().service(scope_module::test));
|
||||||
|
|
||||||
|
let request = srv.request(http::Method::GET, srv.url("/test/test"));
|
||||||
|
let response = request.send().await.unwrap();
|
||||||
|
assert!(response.status().is_success());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn scope_get_param_async() {
|
||||||
|
let srv = actix_test::start(|| App::new().service(scope_module::twice));
|
||||||
|
|
||||||
|
let request = srv.request(http::Method::GET, srv.url("/test/twice-test/4"));
|
||||||
|
let mut response = request.send().await.unwrap();
|
||||||
|
let body = response.body().await.unwrap();
|
||||||
|
let body_str = String::from_utf8(body.to_vec()).unwrap();
|
||||||
|
assert_eq!(body_str, "Twice value: 8");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn scope_post_async() {
|
||||||
|
let srv = actix_test::start(|| App::new().service(scope_module::post));
|
||||||
|
|
||||||
|
let request = srv.request(http::Method::POST, srv.url("/test/test"));
|
||||||
|
let mut response = request.send().await.unwrap();
|
||||||
|
let body = response.body().await.unwrap();
|
||||||
|
let body_str = String::from_utf8(body.to_vec()).unwrap();
|
||||||
|
assert_eq!(body_str, "post works");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn multiple_shared_path_async() {
|
||||||
|
let srv = actix_test::start(|| App::new().service(scope_module::multiple_shared_path));
|
||||||
|
|
||||||
|
let request = srv.request(http::Method::PUT, srv.url("/test/test"));
|
||||||
|
let response = request.send().await.unwrap();
|
||||||
|
assert!(response.status().is_success());
|
||||||
|
|
||||||
|
let request = srv.request(http::Method::PATCH, srv.url("/test/test"));
|
||||||
|
let response = request.send().await.unwrap();
|
||||||
|
assert!(response.status().is_success());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn multiple_multi_path_async() {
|
||||||
|
let srv = actix_test::start(|| App::new().service(scope_module::multiple_separate_paths));
|
||||||
|
|
||||||
|
let request = srv.request(http::Method::HEAD, srv.url("/test/test1"));
|
||||||
|
let response = request.send().await.unwrap();
|
||||||
|
assert!(response.status().is_success());
|
||||||
|
|
||||||
|
let request = srv.request(http::Method::CONNECT, srv.url("/test/test2"));
|
||||||
|
let response = request.send().await.unwrap();
|
||||||
|
assert!(response.status().is_success());
|
||||||
|
|
||||||
|
let request = srv.request(http::Method::OPTIONS, srv.url("/test/test3"));
|
||||||
|
let response = request.send().await.unwrap();
|
||||||
|
assert!(response.status().is_success());
|
||||||
|
|
||||||
|
let request = srv.request(http::Method::TRACE, srv.url("/test/test4"));
|
||||||
|
let response = request.send().await.unwrap();
|
||||||
|
assert!(response.status().is_success());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn scope_delete_async() {
|
||||||
|
let srv = actix_test::start(|| App::new().service(scope_module::delete));
|
||||||
|
|
||||||
|
let request = srv.request(http::Method::DELETE, srv.url("/test/test"));
|
||||||
|
let mut response = request.send().await.unwrap();
|
||||||
|
let body = response.body().await.unwrap();
|
||||||
|
let body_str = String::from_utf8(body.to_vec()).unwrap();
|
||||||
|
assert_eq!(body_str, "delete works");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn scope_get_with_guard_async() {
|
||||||
|
let srv = actix_test::start(|| App::new().service(scope_module::guard));
|
||||||
|
|
||||||
|
let request = srv
|
||||||
|
.request(http::Method::GET, srv.url("/test/test/guard"))
|
||||||
|
.insert_header(("Accept", "image/*"));
|
||||||
|
let response = request.send().await.unwrap();
|
||||||
|
assert!(response.status().is_success());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn scope_v1_v2_async() {
|
||||||
|
let srv = actix_test::start(|| {
|
||||||
|
App::new()
|
||||||
|
.service(mod_scope_v1::test)
|
||||||
|
.service(mod_scope_v2::test)
|
||||||
|
});
|
||||||
|
|
||||||
|
let request = srv.request(http::Method::GET, srv.url("/v1/test"));
|
||||||
|
let mut response = request.send().await.unwrap();
|
||||||
|
let body = response.body().await.unwrap();
|
||||||
|
let body_str = String::from_utf8(body.to_vec()).unwrap();
|
||||||
|
assert_eq!(body_str, "version1 works");
|
||||||
|
|
||||||
|
let request = srv.request(http::Method::GET, srv.url("/v2/test"));
|
||||||
|
let mut response = request.send().await.unwrap();
|
||||||
|
let body = response.body().await.unwrap();
|
||||||
|
let body_str = String::from_utf8(body.to_vec()).unwrap();
|
||||||
|
assert_eq!(body_str, "version2 works");
|
||||||
|
}
|
|
@ -18,6 +18,11 @@ fn compile_macros() {
|
||||||
t.compile_fail("tests/trybuild/routes-missing-method-fail.rs");
|
t.compile_fail("tests/trybuild/routes-missing-method-fail.rs");
|
||||||
t.compile_fail("tests/trybuild/routes-missing-args-fail.rs");
|
t.compile_fail("tests/trybuild/routes-missing-args-fail.rs");
|
||||||
|
|
||||||
|
t.compile_fail("tests/trybuild/scope-on-handler.rs");
|
||||||
|
t.compile_fail("tests/trybuild/scope-missing-args.rs");
|
||||||
|
t.compile_fail("tests/trybuild/scope-invalid-args.rs");
|
||||||
|
t.compile_fail("tests/trybuild/scope-trailing-slash.rs");
|
||||||
|
|
||||||
t.pass("tests/trybuild/docstring-ok.rs");
|
t.pass("tests/trybuild/docstring-ok.rs");
|
||||||
|
|
||||||
t.pass("tests/trybuild/test-runtime.rs");
|
t.pass("tests/trybuild/test-runtime.rs");
|
||||||
|
|
|
@ -20,10 +20,7 @@ error: custom attribute panicked
|
||||||
13 | #[get("/{}")]
|
13 | #[get("/{}")]
|
||||||
| ^^^^^^^^^^^^^
|
| ^^^^^^^^^^^^^
|
||||||
|
|
|
|
||||||
= help: message: Wrong path pattern: "/{}" regex parse error:
|
= help: message: Wrong path pattern: "/{}" empty capture group names are not allowed
|
||||||
((?s-m)^/(?P<>[^/]+))$
|
|
||||||
^
|
|
||||||
error: empty capture group name
|
|
||||||
|
|
||||||
error: custom attribute panicked
|
error: custom attribute panicked
|
||||||
--> $DIR/route-malformed-path-fail.rs:23:1
|
--> $DIR/route-malformed-path-fail.rs:23:1
|
||||||
|
|
14
actix-web-codegen/tests/trybuild/scope-invalid-args.rs
Normal file
14
actix-web-codegen/tests/trybuild/scope-invalid-args.rs
Normal file
|
@ -0,0 +1,14 @@
|
||||||
|
use actix_web_codegen::scope;
|
||||||
|
|
||||||
|
const PATH: &str = "/api";
|
||||||
|
|
||||||
|
#[scope(PATH)]
|
||||||
|
mod api_const {}
|
||||||
|
|
||||||
|
#[scope(true)]
|
||||||
|
mod api_bool {}
|
||||||
|
|
||||||
|
#[scope(123)]
|
||||||
|
mod api_num {}
|
||||||
|
|
||||||
|
fn main() {}
|
17
actix-web-codegen/tests/trybuild/scope-invalid-args.stderr
Normal file
17
actix-web-codegen/tests/trybuild/scope-invalid-args.stderr
Normal file
|
@ -0,0 +1,17 @@
|
||||||
|
error: argument to scope macro is not a string literal, expected: #[scope("/prefix")]
|
||||||
|
--> tests/trybuild/scope-invalid-args.rs:5:9
|
||||||
|
|
|
||||||
|
5 | #[scope(PATH)]
|
||||||
|
| ^^^^
|
||||||
|
|
||||||
|
error: argument to scope macro is not a string literal, expected: #[scope("/prefix")]
|
||||||
|
--> tests/trybuild/scope-invalid-args.rs:8:9
|
||||||
|
|
|
||||||
|
8 | #[scope(true)]
|
||||||
|
| ^^^^
|
||||||
|
|
||||||
|
error: argument to scope macro is not a string literal, expected: #[scope("/prefix")]
|
||||||
|
--> tests/trybuild/scope-invalid-args.rs:11:9
|
||||||
|
|
|
||||||
|
11 | #[scope(123)]
|
||||||
|
| ^^^
|
6
actix-web-codegen/tests/trybuild/scope-missing-args.rs
Normal file
6
actix-web-codegen/tests/trybuild/scope-missing-args.rs
Normal file
|
@ -0,0 +1,6 @@
|
||||||
|
use actix_web_codegen::scope;
|
||||||
|
|
||||||
|
#[scope]
|
||||||
|
mod api {}
|
||||||
|
|
||||||
|
fn main() {}
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue