Slightly improved S3 compatibility

- ListBucket no longer requires any of the optional parameters
    (delimiter, prefix, max-keys, etc.)
- URLs are properly percent-decoded
- PutObject and DeleteObject calls now answer correctly
    (empty body, version id in the x-amz-version-id header);
    a standalone sketch of the last two points follows below
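
For illustration, a minimal standalone sketch of the last two points, assuming the
percent-encoding, http and hex crates as dependencies (decode_path and the signatures
shown here are illustrative, not Garage's actual API):

    use http::Response;

    // Percent-decode a request path, e.g. "/my%20bucket/a%20key" -> "/my bucket/a key".
    fn decode_path(path: &str) -> Result<String, std::str::Utf8Error> {
        Ok(percent_encoding::percent_decode_str(path)
            .decode_utf8()?
            .into_owned())
    }

    // Answer PutObject/DeleteObject with an empty body and the version id
    // carried in the x-amz-version-id header, as S3 clients expect.
    fn put_response(version_id: &[u8]) -> Response<Vec<u8>> {
        Response::builder()
            .header("x-amz-version-id", hex::encode(version_id))
            .body(Vec::new())
            .unwrap()
    }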
Alex Auvolat 2020-05-01 14:30:50 +00:00
parent 3686f100b7
commit 3324971701
6 changed files with 83 additions and 47 deletions

Cargo.lock (generated)

@@ -374,6 +374,7 @@ dependencies = [
  "httpdate 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "hyper 0.13.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "sha2 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "tokio 0.2.18 (registry+https://github.com/rust-lang/crates.io-index)",
  "url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)",


@@ -30,5 +30,5 @@ http = "0.2"
 hyper = "0.13"
 url = "2.1"
 httpdate = "0.3"
+percent-encoding = "2.1.0"


@@ -73,7 +73,11 @@ async fn handler_inner(
     req: Request<Body>,
 ) -> Result<Response<BodyType>, Error> {
     let path = req.uri().path().to_string();
-    let (bucket, key) = parse_bucket_key(path.as_str())?;
+    let path = percent_encoding::percent_decode_str(&path)
+        .decode_utf8()
+        .map_err(|e| Error::BadRequest(format!("Invalid utf8 key ({})", e)))?;
+    let (bucket, key) = parse_bucket_key(&path)?;
     if bucket.len() == 0 {
         return Err(Error::Forbidden(format!(
             "Operations on buckets not allowed"
@@ -120,7 +124,12 @@ async fn handler_inner(
            } else if req.headers().contains_key("x-amz-copy-source") {
                // CopyObject query
                let copy_source = req.headers().get("x-amz-copy-source").unwrap().to_str()?;
-               let (source_bucket, source_key) = parse_bucket_key(copy_source)?;
+               let copy_source = percent_encoding::percent_decode_str(&copy_source)
+                   .decode_utf8()
+                   .map_err(|e| {
+                       Error::BadRequest(format!("Invalid utf8 copy_source ({})", e))
+                   })?;
+               let (source_bucket, source_key) = parse_bucket_key(&copy_source)?;
                if !api_key.allow_read(&source_bucket) {
                    return Err(Error::Forbidden(format!(
                        "Reading from bucket {} not allowed for this key",
@@ -145,8 +154,7 @@ async fn handler_inner(
            } else {
                // DeleteObject query
                let version_uuid = handle_delete(garage, &bucket, &key).await?;
-               let response = format!("{}\n", hex::encode(version_uuid));
-               Ok(Response::new(Box::new(BytesBody::from(response))))
+               Ok(put_response(version_uuid))
            }
        }
        &Method::POST => {
@@ -170,9 +178,14 @@ async fn handler_inner(
        }
    } else {
        match req.method() {
-           &Method::PUT | &Method::HEAD => {
-               // If PUT: CreateBucket, if HEAD: HeadBucket
+           &Method::PUT => {
+               // CreateBucket
                // If we're here, the bucket already exists, so just answer ok
+               println!(
+                   "Body: {}",
+                   std::str::from_utf8(&hyper::body::to_bytes(req.into_body()).await?)
+                       .unwrap_or("<invalid utf8>")
+               );
                let empty_body: BodyType = Box::new(BytesBody::from(vec![]));
                let response = Response::builder()
                    .header("Location", format!("/{}", bucket))
@@ -180,6 +193,12 @@ async fn handler_inner(
                    .unwrap();
                Ok(response)
            }
+           &Method::HEAD => {
+               // HeadBucket
+               let empty_body: BodyType = Box::new(BytesBody::from(vec![]));
+               let response = Response::builder().body(empty_body).unwrap();
+               Ok(response)
+           }
            &Method::DELETE => {
                // DeleteBucket query
                Err(Error::Forbidden(
@@ -187,7 +206,6 @@ async fn handler_inner(
                ))
            }
            &Method::GET => {
-               if params.contains_key(&"prefix".to_string()) {
                // ListObjects query
                let delimiter = params.get("delimiter").map(|x| x.as_str()).unwrap_or(&"");
                let max_keys = params
@@ -198,7 +216,7 @@ async fn handler_inner(
                        })
                    })
                    .unwrap_or(Ok(1000))?;
-               let prefix = params.get("prefix").unwrap();
+               let prefix = params.get("prefix").map(|x| x.as_str()).unwrap_or(&"");
                let urlencode_resp = params
                    .get("encoding-type")
                    .map(|x| x == "url")
@@ -214,11 +232,6 @@ async fn handler_inner(
                    urlencode_resp,
                )
                .await?)
-               } else {
-                   Err(Error::BadRequest(format!(
-                       "Not a list call, so what is it?"
-                   )))
-               }
            }
            _ => Err(Error::BadRequest(format!("Invalid method"))),
        }
@@ -229,7 +242,14 @@ fn parse_bucket_key(path: &str) -> Result<(&str, Option<&str>), Error> {
    let path = path.trim_start_matches('/');
    match path.find('/') {
-       Some(i) => Ok((&path[..i], Some(&path[i + 1..]))),
+       Some(i) => {
+           let key = &path[i + 1..];
+           if key.len() > 0 {
+               Ok((&path[..i], Some(key)))
+           } else {
+               Ok((&path[..i], None))
+           }
+       }
        None => Ok((path, None)),
    }
 }


@@ -82,3 +82,7 @@ impl From<Vec<u8>> for BytesBody {
        Self::new(Bytes::from(x))
    }
 }
+
+pub fn empty_body() -> BodyType {
+   Box::new(BytesBody::from(vec![]))
+}


@@ -29,6 +29,7 @@ pub async fn handle_list(
 ) -> Result<Response<BodyType>, Error> {
    let mut result_keys = BTreeMap::<String, ListResultInfo>::new();
    let mut result_common_prefixes = BTreeSet::<String>::new();
    let mut truncated = true;
    let mut next_chunk_start = marker.unwrap_or(prefix).to_string();
@@ -44,12 +45,19 @@ pub async fn handle_list(
            max_keys,
        )
        .await?;
+       debug!(
+           "List: get range {} (max {}), results: {}",
+           next_chunk_start,
+           max_keys,
+           objects.len()
+       );
        for object in objects.iter() {
-           if let Some(version) = object.versions().iter().find(|x| x.is_data()) {
            if !object.key.starts_with(prefix) {
                truncated = false;
                break;
            }
+           if let Some(version) = object.versions().iter().find(|x| x.is_data()) {
                let common_prefix = if delimiter.len() > 0 {
                    let relative_key = &object.key[prefix.len()..];
                    match relative_key.find(delimiter) {
@@ -117,7 +125,7 @@ pub async fn handle_list(
        for pfx in result_common_prefixes.iter() {
            writeln!(
                &mut xml,
-               "\t<Prefix>{}</Prefix>",
+               "\t\t<Prefix>{}</Prefix>",
                xml_escape(pfx),
                //xml_encode_key(pfx, urlencode_resp)
            )
@@ -126,6 +134,7 @@ pub async fn handle_list(
        writeln!(&mut xml, "\t</CommonPrefixes>").unwrap();
    }
    writeln!(&mut xml, "</ListBucketResult>").unwrap();
+   println!("{}", xml);
    Ok(Response::new(Box::new(BytesBody::from(xml.into_bytes()))))
 }


@@ -195,9 +195,11 @@ impl BodyChunker {
    }
 }
 
-fn put_response(version_uuid: UUID) -> Response<BodyType> {
-   let resp_bytes = format!("{}\n", hex::encode(version_uuid));
-   Response::new(Box::new(BytesBody::from(resp_bytes)))
+pub fn put_response(version_uuid: UUID) -> Response<BodyType> {
+   Response::builder()
+       .header("x-amz-version-id", hex::encode(version_uuid))
+       .body(empty_body())
+       .unwrap()
 }
 
 pub async fn handle_create_multipart_upload(