Mirror of https://git.deuxfleurs.fr/Deuxfleurs/garage.git (synced 2024-11-21)
[multi-char-delimiter-692] allow multi-character delimiters in List* (fix #692)
parent 0c7ce001c9
commit 10bc2ead60
3 changed files with 67 additions and 7 deletions
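In short: the `delimiter` field of the List* endpoints changes from `Option<char>` to `Option<String>`, so grouping results into common prefixes can use an arbitrary substring rather than a single character. As an illustrative sketch only (not Garage's actual implementation; `common_prefix` is a hypothetical helper), this is the effect on how a listed key is turned into a common prefix:

// Illustrative sketch only, not Garage's code: derive the S3 "common prefix"
// of a key for a given (possibly multi-character) delimiter.
fn common_prefix(key: &str, prefix: &str, delimiter: &str) -> Option<String> {
	// Only the part of the key after the requested prefix is searched.
	let rest = key.strip_prefix(prefix)?;
	rest.find(delimiter)
		.map(|i| format!("{}{}", prefix, &rest[..i + delimiter.len()]))
}

fn main() {
	// With delimiter "b/" (as in the new test below), "a/b/c/d" groups under
	// "a/b/", while "a/" contains no "b/" and stays a plain object key.
	assert_eq!(common_prefix("a/b/c/d", "", "b/"), Some("a/b/".to_string()));
	assert_eq!(common_prefix("a/", "", "b/"), None);
}

This matches the expectations encoded in the new test below: keys `a/` and `a/c/` are returned as objects, while `a/b/` and `a/c/b/` are returned as common prefixes.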
@@ -260,7 +260,7 @@ impl ApiHandler for S3ApiServer {
 			common: ListQueryCommon {
 				bucket_name,
 				bucket_id,
-				delimiter: delimiter.map(|d| d.to_string()),
+				delimiter,
 				page_size: max_keys.unwrap_or(1000).clamp(1, 1000),
 				prefix: prefix.unwrap_or_default(),
 				urlencode_resp: encoding_type.map(|e| e == "url").unwrap_or(false),
@@ -290,7 +290,7 @@ impl ApiHandler for S3ApiServer {
 			common: ListQueryCommon {
 				bucket_name,
 				bucket_id,
-				delimiter: delimiter.map(|d| d.to_string()),
+				delimiter,
 				page_size: max_keys.unwrap_or(1000).clamp(1, 1000),
 				urlencode_resp: encoding_type.map(|e| e == "url").unwrap_or(false),
 				prefix: prefix.unwrap_or_default(),
@@ -323,7 +323,7 @@ impl ApiHandler for S3ApiServer {
 			common: ListQueryCommon {
 				bucket_name,
 				bucket_id,
-				delimiter: delimiter.map(|d| d.to_string()),
+				delimiter,
 				page_size: max_uploads.unwrap_or(1000).clamp(1, 1000),
 				prefix: prefix.unwrap_or_default(),
 				urlencode_resp: encoding_type.map(|e| e == "url").unwrap_or(false),

@@ -170,7 +170,7 @@ pub enum Endpoint {
 	},
 	ListBuckets,
 	ListMultipartUploads {
-		delimiter: Option<char>,
+		delimiter: Option<String>,
 		encoding_type: Option<String>,
 		key_marker: Option<String>,
 		max_uploads: Option<usize>,
@@ -178,7 +178,7 @@ pub enum Endpoint {
 		upload_id_marker: Option<String>,
 	},
 	ListObjects {
-		delimiter: Option<char>,
+		delimiter: Option<String>,
 		encoding_type: Option<String>,
 		marker: Option<String>,
 		max_keys: Option<usize>,
@@ -188,7 +188,7 @@ pub enum Endpoint {
 		// This value should always be 2. It is not checked when constructing the struct
 		list_type: String,
 		continuation_token: Option<String>,
-		delimiter: Option<char>,
+		delimiter: Option<String>,
 		encoding_type: Option<String>,
 		fetch_owner: Option<bool>,
 		max_keys: Option<usize>,
@@ -196,7 +196,7 @@ pub enum Endpoint {
 		start_after: Option<String>,
 	},
 	ListObjectVersions {
-		delimiter: Option<char>,
+		delimiter: Option<String>,
 		encoding_type: Option<String>,
 		key_marker: Option<String>,
 		max_keys: Option<u64>,

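The type change above is what actually unlocks multi-character delimiters: a field typed as `char` can only hold a single character, so a value such as `b/` could not be represented at all before reaching the listing code. A minimal, illustrative sketch of that limitation (standard-library behaviour only, not Garage's router code):

fn main() {
	// A single char cannot represent the two-character delimiter "b/".
	let as_char: Result<char, _> = "b/".parse();
	assert!(as_char.is_err());

	// Once the field is Option<String>, the value is kept as-is.
	let as_string: Option<String> = Some("b/".to_string());
	assert_eq!(as_string.as_deref(), Some("b/"));
}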
@@ -613,3 +613,63 @@ async fn test_listmultipart() {
 		assert!(r.common_prefixes.is_none());
 	}
 }
+
+#[tokio::test]
+async fn test_multichar_delimiter() {
+	// Test case from dpape from issue #692 with reference results from Amazon
+
+	let ctx = common::context();
+	let bucket = ctx.create_bucket("multichardelim");
+
+	for k in [
+		"a/", "a/b/", "a/b/c/", "a/b/c/d", "a/c/", "a/c/b/", "a/c/b/e",
+	] {
+		ctx.client
+			.put_object()
+			.bucket(&bucket)
+			.key(k)
+			.send()
+			.await
+			.unwrap();
+	}
+
+	// With delimiter /
+	{
+		let r = ctx
+			.client
+			.list_objects_v2()
+			.bucket(&bucket)
+			.delimiter("/")
+			.send()
+			.await
+			.unwrap();
+
+		assert!(r.contents.is_none());
+
+		let common_prefixes = r.common_prefixes.unwrap();
+		assert_eq!(common_prefixes.len(), 1);
+		assert_eq!(common_prefixes[0].prefix.as_deref().unwrap(), "a/");
+	}
+
+	// With delimiter b/
+	{
+		let r = ctx
+			.client
+			.list_objects_v2()
+			.bucket(&bucket)
+			.delimiter("b/")
+			.send()
+			.await
+			.unwrap();
+
+		let contents = r.contents.unwrap();
+		assert_eq!(contents.len(), 2);
+		assert_eq!(contents[0].key.as_deref().unwrap(), "a/");
+		assert_eq!(contents[1].key.as_deref().unwrap(), "a/c/");
+
+		let common_prefixes = r.common_prefixes.unwrap();
+		assert_eq!(common_prefixes.len(), 2);
+		assert_eq!(common_prefixes[0].prefix.as_deref().unwrap(), "a/b/");
+		assert_eq!(common_prefixes[1].prefix.as_deref().unwrap(), "a/c/b/");
+	}
+}