Fix calculation for buffer duration in basic-tutorial-8

We need to scale the number of samples, not the buffer size.
Sebastian Dröge 2017-12-09 14:03:18 +02:00
parent 963557b79f
commit d7792a4fca


@@ -164,7 +164,7 @@ fn main() {
         .mul_div_floor(data.num_samples, SAMPLE_RATE as u64)
         .expect("u64 overflow");
     let duration = gst::SECOND
-        .mul_div_floor(CHUNK_SIZE as u64, SAMPLE_RATE as u64)
+        .mul_div_floor(num_samples as u64, SAMPLE_RATE as u64)
         .expect("u64 overflow");
     {
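
Note: the buffer's duration should reflect how many audio samples it carries, not how many bytes it occupies; with 16-bit samples, a chunk holds half as many samples as bytes, so scaling by CHUNK_SIZE produced a duration twice as long as the audio in the buffer. Below is a minimal standalone sketch (plain integer math, no GStreamer dependency) contrasting the two calculations, assuming the tutorial's usual values of a 1024-byte chunk of 16-bit mono audio at 44100 Hz:

// Illustrative only: constants mirror the values assumed in basic-tutorial-8.
const CHUNK_SIZE: usize = 1024;     // bytes per buffer
const SAMPLE_RATE: u64 = 44_100;    // samples per second
const SECOND: u64 = 1_000_000_000;  // nanoseconds, like gst::SECOND

fn main() {
    // Each sample is 16 bits (2 bytes), so a chunk holds half as many
    // samples as it has bytes.
    let num_samples = (CHUNK_SIZE / 2) as u64;

    // Before the fix: scales the byte count, giving a duration twice
    // as long as the audio the buffer actually contains.
    let from_bytes = SECOND * CHUNK_SIZE as u64 / SAMPLE_RATE;

    // After the fix: scales the sample count.
    let from_samples = SECOND * num_samples / SAMPLE_RATE;

    println!("duration from bytes:   {} ns", from_bytes);   // ~23_219_954 ns
    println!("duration from samples: {} ns", from_samples); // ~11_609_977 ns
}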