mirror of https://github.com/actix/actix-web.git synced 2024-06-02 21:39:26 +00:00

Prefer cooperative compression over blocking pool.

Yury Yarashevich 2023-09-24 00:12:54 +02:00
parent 05b4c4964f
commit d90883f8f0
4 changed files with 216 additions and 54 deletions

actix-http/CHANGES.md

@@ -2,6 +2,8 @@
## Unreleased
- Revise compression middleware to perform compression cooperatively, periodically yielding control to other tasks instead of offloading compression to a background thread.
## 3.4.0
### Added
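
The gist of the change, outside the diff: instead of handing large chunks to a blocking thread pool, the encoder now compresses a bounded slice per poll, wakes its own waker, and returns Pending so the executor can schedule other tasks in between. A minimal, self-contained sketch of that yield pattern (the type and names here are illustrative, not from this commit):

use std::{
    future::Future,
    pin::Pin,
    task::{Context, Poll},
};

// Hypothetical future that performs its work one bounded unit at a time.
struct CooperativeWork {
    remaining_units: usize,
}

impl Future for CooperativeWork {
    type Output = ();

    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<()> {
        if self.remaining_units == 0 {
            return Poll::Ready(());
        }
        // Do one bounded unit of work, e.g. compress one preferred-size chunk.
        self.remaining_units -= 1;
        // Re-schedule this task and yield so other tasks get a chance to run.
        cx.waker().wake_by_ref();
        Poll::Pending
    }
}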

actix-http/Cargo.toml

@@ -140,3 +140,8 @@ required-features = ["http2", "rustls-0_21"]
name = "response-body-compression"
harness = false
required-features = ["compress-brotli", "compress-gzip", "compress-zstd"]
[[bench]]
name = "compression-chunk-size"
harness = false
required-features = ["compress-brotli", "compress-gzip", "compress-zstd"]

actix-http/benches/compression-chunk-size.rs

@@ -0,0 +1,51 @@
#![allow(clippy::uninlined_format_args)]
use actix_http::{body, encoding::Encoder, ContentEncoding, ResponseHead, StatusCode};
use criterion::{criterion_group, criterion_main, Criterion};
const BODY: &[u8] = include_bytes!("../../Cargo.lock");
const CHUNK_SIZES: [usize; 7] = [512, 1024, 2048, 4096, 8192, 16384, 32768];
const CONTENT_ENCODING: [ContentEncoding; 4] = [
ContentEncoding::Deflate,
ContentEncoding::Gzip,
ContentEncoding::Zstd,
ContentEncoding::Brotli,
];
fn compression_responses(c: &mut Criterion) {
static_assertions::const_assert!(BODY.len() > CHUNK_SIZES[6]);
let mut group = c.benchmark_group("time to compress chunk");
for content_encoding in CONTENT_ENCODING {
for chunk_size in CHUNK_SIZES {
group.bench_function(
format!("{}-{}", content_encoding.as_str(), chunk_size),
|b| {
let rt = actix_rt::Runtime::new().unwrap();
b.iter(|| {
rt.block_on(async move {
let encoder = Encoder::response(
content_encoding,
&mut ResponseHead::new(StatusCode::OK),
&BODY[..chunk_size],
)
.with_encode_chunk_size(chunk_size);
body::to_bytes_limited(encoder, chunk_size + 256)
.await
.unwrap()
.unwrap();
});
});
},
);
}
}
group.finish();
}
criterion_group!(benches, compression_responses);
criterion_main!(benches);
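
Because the [[bench]] entry above sets harness = false, criterion_main! supplies the entry point; the bench only builds with its required compress-* features enabled, so it is run with the command quoted later in encoder.rs, "cargo bench --bench compression-chunk-size", with those features turned on.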

actix-http/src/encoding/encoder.rs

@@ -2,14 +2,12 @@
use std::{
error::Error as StdError,
future::Future,
io::{self, Write as _},
pin::Pin,
task::{Context, Poll},
};
use actix_rt::task::{spawn_blocking, JoinHandle};
use bytes::Bytes;
use bytes::{Buf, Bytes};
use derive_more::Display;
#[cfg(feature = "compress-gzip")]
use flate2::write::{GzEncoder, ZlibEncoder};
@@ -26,14 +24,12 @@ use crate::{
ResponseHead, StatusCode,
};
const MAX_CHUNK_SIZE_ENCODE_IN_PLACE: usize = 1024;
pin_project! {
pub struct Encoder<B> {
#[pin]
body: EncoderBody<B>,
encoder: Option<ContentEncoder>,
fut: Option<JoinHandle<Result<ContentEncoder, io::Error>>>,
encoder: Option<SelectedContentEncoder>,
chunk_ready_to_encode: Option<Bytes>,
eof: bool,
}
}
@@ -45,7 +41,7 @@ impl<B: MessageBody> Encoder<B> {
body: body::None::new(),
},
encoder: None,
fut: None,
chunk_ready_to_encode: None,
eof: true,
}
}
@@ -68,13 +64,13 @@
if should_encode {
// wrap body only if encoder is feature-enabled
if let Some(enc) = ContentEncoder::select(encoding) {
if let Some(selected_encoder) = ContentEncoder::select(encoding) {
update_head(encoding, head);
return Encoder {
body,
encoder: Some(enc),
fut: None,
encoder: Some(selected_encoder),
chunk_ready_to_encode: None,
eof: false,
};
}
@@ -83,10 +79,19 @@
Encoder {
body,
encoder: None,
fut: None,
chunk_ready_to_encode: None,
eof: false,
}
}
pub fn with_encode_chunk_size(mut self, size: usize) -> Self {
if size > 0 {
if let Some(selected_encoder) = self.encoder.as_mut() {
selected_encoder.preferred_chunk_size = size;
}
}
self
}
}
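
with_encode_chunk_size is a builder-style override of the per-poll slice size: a size of 0 is ignored so the encoder keeps the per-algorithm default selected further down, and the call is a no-op when no encoder was selected. The new benchmark above and the round-trip tests at the bottom of this file both use it.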
pin_project! {
@@ -169,22 +174,30 @@
return Poll::Ready(None);
}
if let Some(ref mut fut) = this.fut {
let mut encoder = ready!(Pin::new(fut).poll(cx))
.map_err(|_| {
EncoderError::Io(io::Error::new(
io::ErrorKind::Other,
"Blocking task was cancelled unexpectedly",
))
})?
if let Some(chunk) = this.chunk_ready_to_encode.as_mut() {
let selected_encoder = this.encoder.as_mut().expect(
"when chunk_ready_to_encode is presented the encoder is expected to be presented as well",
);
let encode_len = chunk.len().min(selected_encoder.preferred_chunk_size);
selected_encoder
.content_encoder
.write(&chunk[..encode_len])
.map_err(EncoderError::Io)?;
chunk.advance(encode_len);
let chunk = encoder.take();
*this.encoder = Some(encoder);
this.fut.take();
if chunk.is_empty() {
*this.chunk_ready_to_encode = None;
}
if !chunk.is_empty() {
return Poll::Ready(Some(Ok(chunk)));
let encoded_chunk = selected_encoder.content_encoder.take();
if !encoded_chunk.is_empty() {
return Poll::Ready(Some(Ok(encoded_chunk)));
}
if this.chunk_ready_to_encode.is_some() {
// Yield execution to give other futures a chance to execute
cx.waker().wake_by_ref();
return Poll::Pending;
}
}
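
Stripped of the polling machinery, the loop above amounts to this synchronous skeleton, a sketch assuming the flate2 crate (the function name is illustrative): each pass consumes at most preferred_chunk_size input bytes, which is exactly the work bounded between yields.

use std::io::{self, Write as _};

use flate2::{write::ZlibEncoder, Compression};

// Compress `input` in bounded slices; where this sketch just loops, the real
// poll_next wakes itself and returns Poll::Pending between slices.
fn compress_in_bounded_slices(
    mut input: &[u8],
    preferred_chunk_size: usize,
) -> io::Result<Vec<u8>> {
    let mut encoder = ZlibEncoder::new(Vec::new(), Compression::fast());
    while !input.is_empty() {
        let encode_len = input.len().min(preferred_chunk_size);
        encoder.write_all(&input[..encode_len])?;
        input = &input[encode_len..];
    }
    // finish() flushes the stream and returns the underlying Vec<u8>.
    encoder.finish()
}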
@@ -194,29 +207,18 @@
Some(Err(err)) => return Poll::Ready(Some(Err(err))),
Some(Ok(chunk)) => {
if let Some(mut encoder) = this.encoder.take() {
if chunk.len() < MAX_CHUNK_SIZE_ENCODE_IN_PLACE {
encoder.write(&chunk).map_err(EncoderError::Io)?;
let chunk = encoder.take();
*this.encoder = Some(encoder);
if !chunk.is_empty() {
return Poll::Ready(Some(Ok(chunk)));
}
} else {
*this.fut = Some(spawn_blocking(move || {
encoder.write(&chunk)?;
Ok(encoder)
}));
}
} else {
if this.encoder.is_none() {
return Poll::Ready(Some(Ok(chunk)));
}
*this.chunk_ready_to_encode = Some(chunk);
}
None => {
if let Some(encoder) = this.encoder.take() {
let chunk = encoder.finish().map_err(EncoderError::Io)?;
if let Some(selected_encoder) = this.encoder.take() {
let chunk = selected_encoder
.content_encoder
.finish()
.map_err(EncoderError::Io)?;
if chunk.is_empty() {
return Poll::Ready(None);
@@ -276,28 +278,56 @@ enum ContentEncoder {
Zstd(ZstdEncoder<'static, Writer>),
}
struct SelectedContentEncoder {
content_encoder: ContentEncoder,
preferred_chunk_size: usize,
}
impl ContentEncoder {
fn select(encoding: ContentEncoding) -> Option<Self> {
fn select(encoding: ContentEncoding) -> Option<SelectedContentEncoder> {
// Chunk sizes picked as the largest chunk size that took less than 50 µs to compress in "cargo bench --bench compression-chunk-size"
// Rust 1.72 linux/arm64 in Docker on Apple M2 Pro: "time to compress chunk/deflate-16384" time: [39.114 µs 39.283 µs 39.457 µs]
const MAX_DEFLATE_CHUNK_SIZE: usize = 16384;
// Rust 1.72 linux/arm64 in Docker on Apple M2 Pro: "time to compress chunk/gzip-16384" time: [40.121 µs 40.340 µs 40.566 µs]
const MAX_GZIP_CHUNK_SIZE: usize = 16384;
// Rust 1.72 linux/arm64 in Docker on Apple M2 Pro: "time to compress chunk/br-8192" time: [46.076 µs 46.208 µs 46.343 µs]
const MAX_BROTLI_CHUNK_SIZE: usize = 8192;
// Rust 1.72 linux/arm64 in Docker on Apple M2 Pro: "time to compress chunk/zstd-16384" time: [32.872 µs 32.967 µs 33.068 µs]
const MAX_ZSTD_CHUNK_SIZE: usize = 16384;
match encoding {
#[cfg(feature = "compress-gzip")]
ContentEncoding::Deflate => Some(ContentEncoder::Deflate(ZlibEncoder::new(
Writer::new(),
flate2::Compression::fast(),
))),
ContentEncoding::Deflate => Some(SelectedContentEncoder {
content_encoder: ContentEncoder::Deflate(ZlibEncoder::new(
Writer::new(),
flate2::Compression::fast(),
)),
preferred_chunk_size: MAX_DEFLATE_CHUNK_SIZE,
}),
#[cfg(feature = "compress-gzip")]
ContentEncoding::Gzip => Some(ContentEncoder::Gzip(GzEncoder::new(
Writer::new(),
flate2::Compression::fast(),
))),
ContentEncoding::Gzip => Some(SelectedContentEncoder {
content_encoder: ContentEncoder::Gzip(GzEncoder::new(
Writer::new(),
flate2::Compression::fast(),
)),
preferred_chunk_size: MAX_GZIP_CHUNK_SIZE,
}),
#[cfg(feature = "compress-brotli")]
ContentEncoding::Brotli => Some(ContentEncoder::Brotli(new_brotli_compressor())),
ContentEncoding::Brotli => Some(SelectedContentEncoder {
content_encoder: ContentEncoder::Brotli(new_brotli_compressor()),
preferred_chunk_size: MAX_BROTLI_CHUNK_SIZE,
}),
#[cfg(feature = "compress-zstd")]
ContentEncoding::Zstd => {
let encoder = ZstdEncoder::new(Writer::new(), 3).ok()?;
Some(ContentEncoder::Zstd(encoder))
Some(SelectedContentEncoder {
content_encoder: ContentEncoder::Zstd(encoder),
preferred_chunk_size: MAX_ZSTD_CHUNK_SIZE,
})
}
_ => None,
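
A sanity check on those budgets from the measurements above: at roughly 40 µs per 16384-byte chunk, deflate and gzip compress at about 400 MB/s, and zstd slightly faster; brotli needs the smaller 8192-byte slice (~46 µs) to stay under the same ~50 µs-per-poll target.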
@@ -426,3 +456,77 @@ impl From<EncoderError> for crate::Error {
crate::Error::new_encoder().with_cause(err)
}
}
#[cfg(test)]
mod tests {
use bytes::BytesMut;
use rand::{seq::SliceRandom, Rng};
use super::*;
static EMPTY_BODY: &[u8] = &[];
static SHORT_BODY: &[u8] = &[1, 2, 3, 4, 6, 7, 8];
static LONG_BODY: &[u8] = include_bytes!("encoder.rs");
static BODIES: &[&[u8]] = &[EMPTY_BODY, SHORT_BODY, LONG_BODY];
async fn test_compression_of_content_encoding(encoding: ContentEncoding, body: &[u8]) {
let mut head = ResponseHead::new(StatusCode::OK);
let body_to_compress = {
let mut body = BytesMut::from(body);
body.shuffle(&mut rand::thread_rng());
body.freeze()
};
let compressed_body = Encoder::response(encoding, &mut head, body_to_compress.clone())
.with_encode_chunk_size(rand::thread_rng().gen_range(32..128));
let SelectedContentEncoder {
content_encoder: mut compressor,
preferred_chunk_size: _,
} = ContentEncoder::select(encoding).unwrap();
compressor.write(&body_to_compress).unwrap();
let reference_compressed_bytes = compressor.finish().unwrap();
let compressed_bytes =
body::to_bytes_limited(compressed_body, 256 + body_to_compress.len())
.await
.unwrap()
.unwrap();
assert_eq!(reference_compressed_bytes, compressed_bytes);
}
#[actix_rt::test]
#[cfg(feature = "compress-gzip")]
async fn test_gzip_compression_in_chunks_is_the_same_as_whole_chunk_compression() {
for body in BODIES {
test_compression_of_content_encoding(ContentEncoding::Gzip, body).await;
}
}
#[actix_rt::test]
#[cfg(feature = "compress-gzip")]
async fn test_deflate_compression_in_chunks_is_the_same_as_whole_chunk_compression() {
for body in BODIES {
test_compression_of_content_encoding(ContentEncoding::Deflate, body).await;
}
}
#[actix_rt::test]
#[cfg(feature = "compress-brotli")]
async fn test_brotli_compression_in_chunks_is_the_same_as_whole_chunk_compression() {
for body in BODIES {
test_compression_of_content_encoding(ContentEncoding::Brotli, body).await;
}
}
#[actix_rt::test]
#[cfg(feature = "compress-zstd")]
async fn test_zstd_compression_in_chunks_is_the_same_as_whole_chunk_compression() {
for body in BODIES {
test_compression_of_content_encoding(ContentEncoding::Zstd, body).await;
}
}
}
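
The tests assert that slice-at-a-time encoding is byte-for-byte identical to one-shot encoding with the same encoder settings; the random chunk size in 32..128 and the shuffled body vary the slice boundaries and input across runs.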