mirror of https://github.com/alfg/mp4-rust.git synced 2025-02-23 16:26:15 +00:00

Removing unnecessary code and logic

Stepan Murathanov 2025-01-27 18:13:06 +03:00
parent 7abff74e90
commit eeac7c127b
17 changed files with 372 additions and 3124 deletions

View file

@@ -1,338 +0,0 @@
// use std::convert::TryInto;
// use std::env;
// use std::fs::File;
// use std::io::{self, BufReader};
// use std::path::Path;
// use anyhow::bail;
// use bytes::{BufMut, Bytes, BytesMut};
// use futures::SinkExt;
// use mp4::TrackType;
// use std::io::{Cursor, Write};
// use tokio_util::codec::Encoder;
// use bytes::Buf;
// use mpeg2ts::{
// es::{StreamId, StreamType},
// pes::PesHeader,
// time::{ClockReference, Timestamp},
// ts::{
// payload::{self, Pat, Pmt},
// AdaptationField, ContinuityCounter, EsInfo, Pid, ProgramAssociation,
// TransportScramblingControl, TsHeader, TsPacket, TsPacketWriter, TsPayload, VersionNumber,
// WriteTsPacket,
// },
// Error as TsError,
// };
// const PMT_PID: u16 = 4096;
// const VIDEO_ES_PID: u16 = 256;
// // const AUDIO_ES_PID: u16 = 258;
// const PES_VIDEO_STREAM_ID: u8 = 224;
// // const PES_AUDIO_STREAM_ID: u8 = 192;
// #[derive(Default)]
// pub struct TsEncoder {
// video_continuity_counter: ContinuityCounter,
// header_sent: bool,
// timestamp: i64,
// }
// impl TsEncoder {
// fn write_packet(
// &mut self,
// writer: &mut TsPacketWriter<impl Write>,
// pts: Timestamp,
// dts: Timestamp,
// data: &[u8],
// is_keyframe: bool,
// ) -> Result<(), TsError> {
// let mut header = Self::default_ts_header(VIDEO_ES_PID, self.video_continuity_counter)?;
// let mut buf = Cursor::new(data);
// let packet = {
// let data = payload::Bytes::new(&buf.chunk()[..buf.remaining().min(153)])?;
// buf.advance(data.len());
// TsPacket {
// header: header.clone(),
// adaptation_field: is_keyframe.then(|| AdaptationField {
// discontinuity_indicator: false,
// random_access_indicator: true,
// es_priority_indicator: false,
// pcr: Some(ClockReference::from(pts)),
// opcr: None,
// splice_countdown: None,
// transport_private_data: Vec::new(),
// extension: None,
// }),
// payload: Some(TsPayload::Pes(payload::Pes {
// header: PesHeader {
// stream_id: StreamId::new(PES_VIDEO_STREAM_ID),
// priority: false,
// data_alignment_indicator: false,
// copyright: false,
// original_or_copy: false,
// pts: Some(pts),
// dts: if pts == dts { None } else { Some(dts) },
// escr: None,
// },
// pes_packet_len: 0,
// data,
// })),
// }
// };
// writer.write_ts_packet(&packet)?;
// header.continuity_counter.increment();
// while buf.has_remaining() {
// let raw_payload =
// payload::Bytes::new(&buf.chunk()[..buf.remaining().min(payload::Bytes::MAX_SIZE)])?;
// buf.advance(raw_payload.len());
// let packet = TsPacket {
// header: header.clone(),
// adaptation_field: None,
// payload: Some(TsPayload::Raw(raw_payload)),
// };
// writer.write_ts_packet(&packet)?;
// header.continuity_counter.increment();
// }
// self.video_continuity_counter = header.continuity_counter;
// Ok(())
// }
// pub fn new(timestamp: i64) -> TsEncoder {
// Self {
// video_continuity_counter: Default::default(),
// header_sent: false,
// timestamp,
// }
// }
// }
// struct Frame {
// pub pts: i64,
// pub dts: i64,
// pub body: Bytes,
// pub key: bool,
// }
// impl<'a> Encoder<&'a Frame> for TsEncoder {
// type Error = anyhow::Error;
// fn encode(&mut self, frame: &'a Frame, dst: &mut BytesMut) -> Result<(), Self::Error> {
// let mut writer = TsPacketWriter::new(dst.writer());
// if !self.header_sent {
// self.header_sent = true;
// self.write_header(&mut writer, StreamType::H264)?;
// }
// let pts = frame.pts - self.timestamp;
// let dts = frame.dts - self.timestamp;
// let p_ts = Timestamp::new((pts as u64 * 9) / 100 + 1).map_err(TsError::from)?;
// let d_ts = Timestamp::new((dts as u64 * 9) / 100 + 1).map_err(TsError::from)?;
// self.write_packet(&mut writer, p_ts, d_ts, &frame.body, frame.key)?;
// Ok(())
// }
// }
// impl TsEncoder {
// #[inline]
// fn write_header<W: WriteTsPacket>(
// &mut self,
// writer: &mut W,
// stream_type: StreamType,
// ) -> Result<(), TsError> {
// self.write_packets(
// writer,
// [
// &Self::default_pat_packet(),
// &Self::default_pmt_packet(stream_type),
// ],
// )?;
// Ok(())
// }
// #[inline]
// fn write_packets<'a, W: WriteTsPacket, P: IntoIterator<Item = &'a TsPacket>>(
// &mut self,
// writer: &mut W,
// packets: P,
// ) -> Result<(), TsError> {
// packets
// .into_iter()
// .try_for_each(|pak| writer.write_ts_packet(pak))?;
// Ok(())
// }
// fn default_ts_header(
// pid: u16,
// continuity_counter: ContinuityCounter,
// ) -> Result<TsHeader, TsError> {
// Ok(TsHeader {
// transport_error_indicator: false,
// transport_priority: false,
// pid: Pid::new(pid)?,
// transport_scrambling_control: TransportScramblingControl::NotScrambled,
// continuity_counter,
// })
// }
// fn default_pat_packet() -> TsPacket {
// TsPacket {
// header: Self::default_ts_header(0, Default::default()).unwrap(),
// adaptation_field: None,
// payload: Some(TsPayload::Pat(Pat {
// transport_stream_id: 1,
// version_number: VersionNumber::default(),
// table: vec![ProgramAssociation {
// program_num: 1,
// program_map_pid: Pid::new(PMT_PID).unwrap(),
// }],
// })),
// }
// }
// fn default_pmt_packet(stream_type: StreamType) -> TsPacket {
// TsPacket {
// header: Self::default_ts_header(PMT_PID, Default::default()).unwrap(),
// adaptation_field: None,
// payload: Some(TsPayload::Pmt(Pmt {
// program_num: 1,
// pcr_pid: Some(Pid::new(VIDEO_ES_PID).unwrap()),
// version_number: VersionNumber::default(),
// program_info: vec![],
// es_info: vec![EsInfo {
// stream_type,
// elementary_pid: Pid::new(VIDEO_ES_PID).unwrap(),
// descriptors: vec![],
// }],
// })),
// }
// }
// }
// #[tokio::main(flavor = "current_thread")]
// async fn main() {
// let args: Vec<String> = env::args().collect();
// if args.len() < 2 {
// println!("Usage: mp4sample <filename>");
// std::process::exit(1);
// }
// if let Err(err) = samples(&args[1]).await {
// let _ = writeln!(io::stderr(), "{}", err);
// }
// }
// async fn samples<P: AsRef<Path>>(filename: &P) -> anyhow::Result<()> {
// let mut ts_name = filename.as_ref().parent().unwrap().to_path_buf();
// ts_name.push(format!(
// "{}.ts",
// filename.as_ref().file_stem().unwrap().to_str().unwrap()
// ));
// let f = File::open(filename)?;
// let size = f.metadata()?.len();
// let reader = BufReader::new(f);
// let ts_file = tokio::fs::File::create(ts_name).await.unwrap();
// let mut ts = tokio_util::codec::FramedWrite::new(ts_file, TsEncoder::new(-1_400_000));
// let mut mp4 = mp4::Mp4Reader::read_header(reader, size)?;
// if let Some(track_id) = mp4.tracks().iter().find_map(|(k, v)| {
// v.track_type()
// .ok()
// .and_then(|x| matches!(x, TrackType::Video).then_some(*k))
// }) {
// let sample_count = mp4.sample_count(track_id).unwrap();
// let mut params = BytesMut::new();
// let track = mp4.tracks().get(&track_id).unwrap();
// let timescale = track.timescale();
// if let Ok(sps) = track.sequence_parameter_set() {
// params.put_slice(&[0, 0, 0, 1]);
// params.put_slice(sps);
// }
// if let Ok(pps) = track.picture_parameter_set() {
// params.put_slice(&[0, 0, 0, 1]);
// params.put_slice(pps);
// }
// for sample_idx in 0..sample_count {
// let sample_id = sample_idx + 1;
// let sample = mp4.read_sample(track_id, sample_id);
// if let Some(samp) = sample.unwrap() {
// let dts = (samp.start_time as i64 * 1_000_000) / timescale as i64;
// let pts = (samp.start_time as i64 + samp.rendering_offset as i64) * 1_000_000
// / timescale as i64;
// let mut bytes = BytesMut::from(samp.bytes.as_ref());
// convert_h264(&mut bytes).unwrap();
// let mut body = BytesMut::with_capacity(bytes.len() + 6);
// if sample_idx == 0 {
// body.put_slice(&params);
// }
// body.put_slice(&[0, 0, 0, 1, 9, 240]);
// body.put_slice(&bytes);
// ts.send(&Frame {
// pts,
// dts,
// body: body.freeze(),
// key: samp.is_sync,
// })
// .await?;
// }
// }
// }
// Ok(())
// }
// fn convert_h264(data: &mut [u8]) -> anyhow::Result<()> {
// // TODO:
// // * For each IDR frame, copy the SPS and PPS from the stream's
// // parameters, rather than depend on it being present in the frame
// // already. In-band parameters aren't guaranteed. This is awkward
// // with h264_reader v0.5's h264_reader::avcc::AvcDecoderRecord because it
// // strips off the NAL header byte from each parameter. The next major
// // version shouldn't do this.
// // * Copy only the slice data. In particular, don't copy SEI, which confuses
// // Safari: <https://github.com/scottlamb/retina/issues/60#issuecomment-1178369955>
// let mut i = 0;
// while i < data.len() - 3 {
// // Replace each NAL's length with the Annex B start code b"\x00\x00\x00\x01".
// let bytes = &mut data[i..i + 4];
// let nalu_length = u32::from_be_bytes(bytes.try_into().unwrap()) as usize;
// bytes.copy_from_slice(&[0, 0, 0, 1]);
// i += 4 + nalu_length;
// if i > data.len() {
// bail!("partial nal body");
// }
// }
// if i < data.len() {
// bail!("partial nal body");
// }
// Ok(())
// }
fn main() {}

View file

@@ -1,95 +0,0 @@
// use std::env;
// use std::fs::File;
// use std::io::prelude::*;
// use std::io::{self, BufReader, BufWriter};
// use std::path::Path;
// use mp4::{
// AacConfig, AvcConfig, HevcConfig, MediaConfig, MediaType, Mp4Config, Result, TrackConfig,
// TtxtConfig, Vp9Config,
// };
// fn main() {
// let args: Vec<String> = env::args().collect();
// if args.len() < 3 {
// println!("Usage: mp4copy <source file> <target file>");
// std::process::exit(1);
// }
// if let Err(err) = copy(&args[1], &args[2]) {
// let _ = writeln!(io::stderr(), "{}", err);
// }
// }
// fn copy<P: AsRef<Path>>(src_filename: &P, dst_filename: &P) -> Result<()> {
// let src_file = File::open(src_filename)?;
// let size = src_file.metadata()?.len();
// let reader = BufReader::new(src_file);
// let dst_file = File::create(dst_filename)?;
// let writer = BufWriter::new(dst_file);
// let mut mp4_reader = mp4::Mp4Reader::read_header(reader, size)?;
// let mut mp4_writer = mp4::Mp4Writer::write_start(
// writer,
// &Mp4Config {
// major_brand: *mp4_reader.major_brand(),
// minor_version: mp4_reader.minor_version(),
// compatible_brands: mp4_reader.compatible_brands().to_vec(),
// timescale: mp4_reader.timescale(),
// },
// )?;
// // TODO interleaving
// for track in mp4_reader.tracks().values() {
// let media_conf = match track.media_type()? {
// MediaType::H264 => MediaConfig::AvcConfig(AvcConfig {
// width: track.width(),
// height: track.height(),
// seq_param_set: track.sequence_parameter_set()?.to_vec(),
// pic_param_set: track.picture_parameter_set()?.to_vec(),
// }),
// MediaType::H265 => MediaConfig::HevcConfig(HevcConfig {
// width: track.width(),
// height: track.height(),
// }),
// MediaType::VP9 => MediaConfig::Vp9Config(Vp9Config {
// width: track.width(),
// height: track.height(),
// }),
// MediaType::AAC => MediaConfig::AacConfig(AacConfig {
// bitrate: track.bitrate(),
// profile: track.audio_profile()?,
// freq_index: track.sample_freq_index()?,
// chan_conf: track.channel_config()?,
// }),
// MediaType::TTXT => MediaConfig::TtxtConfig(TtxtConfig {}),
// };
// let track_conf = TrackConfig {
// track_type: track.track_type()?,
// timescale: track.timescale(),
// language: track.language().to_string(),
// media_conf,
// };
// mp4_writer.add_track(&track_conf)?;
// }
// for track_id in mp4_reader.tracks().keys().copied().collect::<Vec<u32>>() {
// let sample_count = mp4_reader.sample_count(track_id)?;
// for sample_idx in 0..sample_count {
// let sample_id = sample_idx + 1;
// let sample = mp4_reader.read_sample(track_id, sample_id)?.unwrap();
// mp4_writer.write_sample(track_id, &sample)?;
// // println!("copy {}:({})", sample_id, sample);
// }
// }
// mp4_writer.write_end()?;
// Ok(())
// }
fn main() {}

View file

@@ -1,144 +0,0 @@
// use std::env;
// use std::fs::File;
// use std::io::prelude::*;
// use std::io::{self, BufReader};
// use std::path::Path;
// use mp4::{Mp4Box, Result};
// fn main() {
// let args: Vec<String> = env::args().collect();
// if args.len() < 2 {
// println!("Usage: mp4dump <filename>");
// std::process::exit(1);
// }
// if let Err(err) = dump(&args[1]) {
// let _ = writeln!(io::stderr(), "{}", err);
// }
// }
// fn dump<P: AsRef<Path>>(filename: &P) -> Result<()> {
// let f = File::open(filename)?;
// let boxes = get_boxes(f)?;
// // print out boxes
// for b in boxes.iter() {
// println!("[{}] size={} {}", b.name, b.size, b.summary);
// }
// Ok(())
// }
// #[derive(Debug, Clone, PartialEq, Default)]
// pub struct Box {
// name: String,
// size: u64,
// summary: String,
// indent: u32,
// }
// fn get_boxes(file: File) -> Result<Vec<Box>> {
// let size = file.metadata()?.len();
// let reader = BufReader::new(file);
// let mp4 = mp4::Mp4Reader::read_header(reader, size)?;
// // collect known boxes
// let mut boxes = vec![
// build_box(&mp4.header.ftyp),
// build_box(&mp4.header.moov),
// build_box(&mp4.header.moov.mvhd),
// ];
// if let Some(ref mvex) = &mp4.header.moov.mvex {
// boxes.push(build_box(mvex));
// if let Some(mehd) = &mvex.mehd {
// boxes.push(build_box(mehd));
// }
// boxes.push(build_box(&mvex.trex));
// }
// // trak.
// for track in mp4.tracks().values() {
// boxes.push(build_box(&track.trak));
// boxes.push(build_box(&track.trak.tkhd));
// if let Some(ref edts) = track.trak.edts {
// boxes.push(build_box(edts));
// if let Some(ref elst) = edts.elst {
// boxes.push(build_box(elst));
// }
// }
// // trak.mdia
// let mdia = &track.trak.mdia;
// boxes.push(build_box(mdia));
// boxes.push(build_box(&mdia.mdhd));
// boxes.push(build_box(&mdia.hdlr));
// boxes.push(build_box(&track.trak.mdia.minf));
// // trak.mdia.minf
// let minf = &track.trak.mdia.minf;
// if let Some(ref vmhd) = &minf.vmhd {
// boxes.push(build_box(vmhd));
// }
// if let Some(ref smhd) = &minf.smhd {
// boxes.push(build_box(smhd));
// }
// // trak.mdia.minf.stbl
// let stbl = &track.trak.mdia.minf.stbl;
// boxes.push(build_box(stbl));
// boxes.push(build_box(&stbl.stsd));
// if let Some(ref avc1) = &stbl.stsd.avc1 {
// boxes.push(build_box(avc1));
// }
// if let Some(ref hev1) = &stbl.stsd.hev1 {
// boxes.push(build_box(hev1));
// }
// if let Some(ref mp4a) = &stbl.stsd.mp4a {
// boxes.push(build_box(mp4a));
// }
// boxes.push(build_box(&stbl.stts));
// if let Some(ref ctts) = &stbl.ctts {
// boxes.push(build_box(ctts));
// }
// if let Some(ref stss) = &stbl.stss {
// boxes.push(build_box(stss));
// }
// boxes.push(build_box(&stbl.stsc));
// boxes.push(build_box(&stbl.stsz));
// if let Some(ref stco) = &stbl.stco {
// boxes.push(build_box(stco));
// }
// if let Some(ref co64) = &stbl.co64 {
// boxes.push(build_box(co64));
// }
// }
// // If fragmented, add moof boxes.
// for moof in mp4.header.moofs.iter() {
// boxes.push(build_box(moof));
// boxes.push(build_box(&moof.mfhd));
// for traf in moof.trafs.iter() {
// boxes.push(build_box(traf));
// boxes.push(build_box(&traf.tfhd));
// if let Some(ref trun) = &traf.trun {
// boxes.push(build_box(trun));
// }
// }
// }
// Ok(boxes)
// }
// fn build_box<M: Mp4Box + std::fmt::Debug>(m: &M) -> Box {
// Box {
// name: m.box_type().to_string(),
// size: m.box_size(),
// summary: m.summary().unwrap(),
// indent: 0,
// }
// }
fn main() {}

View file

@@ -1,144 +0,0 @@
// use std::env;
// use std::fs::File;
// use std::io::prelude::*;
// use std::io::{self, BufReader};
// use std::path::Path;
// use mp4::{Error, Mp4Track, Result, TrackType};
// fn main() {
// let args: Vec<String> = env::args().collect();
// if args.len() < 2 {
// println!("Usage: mp4info <filename>");
// std::process::exit(1);
// }
// if let Err(err) = info(&args[1]) {
// let _ = writeln!(io::stderr(), "{}", err);
// }
// }
// fn info<P: AsRef<Path>>(filename: &P) -> Result<()> {
// let f = File::open(filename)?;
// let size = f.metadata()?.len();
// let reader = BufReader::new(f);
// let mp4 = mp4::Mp4Reader::read_header(reader, size)?;
// println!("File:");
// println!(" file size: {}", mp4.size());
// println!(" major_brand: {}", mp4.major_brand());
// let mut compatible_brands = String::new();
// for brand in mp4.compatible_brands().iter() {
// compatible_brands.push_str(&brand.to_string());
// compatible_brands.push(' ');
// }
// println!(" compatible_brands: {}\n", compatible_brands);
// println!("Movie:");
// println!(" version: {}", mp4.header.moov.mvhd.version);
// println!(
// " creation time: {}",
// creation_time(mp4.header.moov.mvhd.creation_time)
// );
// println!(" duration: {:?}", mp4.duration());
// println!(" fragments: {:?}", mp4.is_fragmented());
// println!(" timescale: {:?}\n", mp4.timescale());
// println!("Found {} Tracks", mp4.tracks().len());
// for track in mp4.tracks().values() {
// let media_info = match track.track_type()? {
// TrackType::Video => video_info(track),
// TrackType::Audio => audio_info(track),
// TrackType::Subtitle => subtitle_info(track),
// };
// println!(
// " Track: #{}({}) {}: {}",
// track.track_id(),
// track.language(),
// track.track_type()?,
// media_info.unwrap_or_else(|e| e.to_string())
// );
// }
// Ok(())
// }
// fn video_info(track: &Mp4Track) -> Result<String> {
// if track.trak.mdia.minf.stbl.stsd.avc1.is_some() {
// Ok(format!(
// "{} ({}) ({:?}), {}x{}, {} kb/s, {:.2} fps",
// track.media_type()?,
// track.video_profile()?,
// track.box_type()?,
// track.width(),
// track.height(),
// track.bitrate() / 1000,
// track.frame_rate()
// ))
// } else {
// Ok(format!(
// "{} ({:?}), {}x{}, {} kb/s, {:.2} fps",
// track.media_type()?,
// track.box_type()?,
// track.width(),
// track.height(),
// track.bitrate() / 1000,
// track.frame_rate()
// ))
// }
// }
// fn audio_info(track: &Mp4Track) -> Result<String> {
// if let Some(ref mp4a) = track.trak.mdia.minf.stbl.stsd.mp4a {
// if mp4a.esds.is_some() {
// let profile = match track.audio_profile() {
// Ok(val) => val.to_string(),
// _ => "-".to_string(),
// };
// let channel_config = match track.channel_config() {
// Ok(val) => val.to_string(),
// _ => "-".to_string(),
// };
// Ok(format!(
// "{} ({}) ({:?}), {} Hz, {}, {} kb/s",
// track.media_type()?,
// profile,
// track.box_type()?,
// track.sample_freq_index()?.freq(),
// channel_config,
// track.bitrate() / 1000
// ))
// } else {
// Ok(format!(
// "{} ({:?}), {} kb/s",
// track.media_type()?,
// track.box_type()?,
// track.bitrate() / 1000
// ))
// }
// } else {
// Err(Error::InvalidData("mp4a box not found"))
// }
// }
// fn subtitle_info(track: &Mp4Track) -> Result<String> {
// if track.trak.mdia.minf.stbl.stsd.tx3g.is_some() {
// Ok(format!("{} ({:?})", track.media_type()?, track.box_type()?,))
// } else {
// Err(Error::InvalidData("tx3g box not found"))
// }
// }
// fn creation_time(creation_time: u64) -> u64 {
// // convert from MP4 epoch (1904-01-01) to Unix epoch (1970-01-01)
// if creation_time >= 2082844800 {
// creation_time - 2082844800
// } else {
// creation_time
// }
// }
fn main() {}

View file

@@ -2,7 +2,7 @@ use std::io::prelude::*;
use std::path::Path;
use std::{env, io};
use mp4::{MemoryStorageError, TrackType};
use mp4::{error::MemoryStorageError, TrackType};
use tokio::fs::File;
use tokio::io::BufReader;
@@ -34,9 +34,11 @@ async fn samples<P: AsRef<Path>>(filename: &P) -> Result<(), mp4::Error<MemorySt
.map(|(k, _)| *k);
let track_id = keys.next().unwrap();
let track = mp4_file.tracks.get(&track_id).unwrap();
let samples_len = mp4_file.tracks.get(&track_id).unwrap().samples.len();
for idx in 0..samples_len {
let samp = mp4_file.tracks.get(&track_id).unwrap().samples[idx].clone();
for (idx, samp) in track.samples.iter().enumerate() {
let data = mp4_file
.read_sample_data(track_id, idx)
.await?
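
The updated loop above walks track.samples directly instead of cloning each sample out of the map by index. A compilable sketch of the same pattern as a free-standing helper (the name dump_samples is hypothetical), written against the post-commit Mp4File API shown further down, where read_sample_data takes &mut self; the sample count is therefore copied out before the loop rather than holding a borrow of mp4_file.tracks across the call:

use mp4::{error::BoxError, Mp4File};
use tokio::io::{AsyncRead, AsyncSeek};

// Hypothetical helper: print the size of every sample in one track.
async fn dump_samples<R>(mp4_file: &mut Mp4File<'_, R>, track_id: u32) -> Result<(), BoxError>
where
    R: AsyncRead + AsyncSeek + Unpin,
{
    let sample_count = mp4_file
        .tracks
        .get(&track_id)
        .map(|track| track.samples.len())
        .unwrap_or(0);
    for idx in 0..sample_count {
        // Ok(None) means the track id is unknown or the sample's offset falls
        // outside every recorded data block.
        if let Some(data) = mp4_file.read_sample_data(track_id, idx).await? {
            println!("sample {}: {} bytes", idx + 1, data.len());
        }
    }
    Ok(())
}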

View file

@@ -1,24 +0,0 @@
use mp4::{Mp4Config, Mp4Writer};
use std::io::Cursor;
fn main() -> mp4::Result<()> {
let config = Mp4Config {
major_brand: str::parse("isom").unwrap(),
minor_version: 512,
compatible_brands: vec![
str::parse("isom").unwrap(),
str::parse("iso2").unwrap(),
str::parse("avc1").unwrap(),
str::parse("mp41").unwrap(),
],
timescale: 1000,
};
let data = Cursor::new(Vec::<u8>::new());
let mut writer = Mp4Writer::write_start(data, &config)?;
writer.write_end()?;
let data: Vec<u8> = writer.into_writer().into_inner();
println!("{:?}", data);
Ok(())
}

View file

@@ -1,30 +0,0 @@
use mp4::Mp4Header;
use std::env;
use tokio::fs::File;
#[tokio::main]
async fn main() {
let args: Vec<String> = env::args().collect();
if args.len() < 2 {
println!("Usage: simple <filename>");
std::process::exit(1);
}
let filename = &args[1];
let mut f = File::open(filename).await.unwrap();
let mp4 = Mp4Header::read(&mut f, Some(())).await.unwrap();
println!("Major Brand: {:?}", mp4.major_brand());
for track in mp4.tracks().values() {
println!(
"Track: #{}({}) {} {}",
track.track_id(),
track.language(),
track.track_type().unwrap(),
track.box_type().unwrap(),
);
}
}

View file

@@ -1,308 +0,0 @@
use std::{
collections::HashMap,
io::{Read, Seek},
time::Duration,
};
use tokio::io::{AsyncRead, AsyncReadExt};
use crate::{
BlockReader, BoxHeader, BoxType, EmsgBox, Error, FourCC, FtypBox, MetaBox, Metadata, MoofBox,
MoovBox, Mp4Sample, Mp4Track,
};
#[derive(Debug, Clone)]
pub struct Mp4Header {
pub ftyp: Option<FtypBox>,
pub moov: Option<MoovBox>,
pub moofs: Vec<MoofBox>,
pub emsgs: Vec<EmsgBox>,
tracks: HashMap<u32, Mp4Track>,
}
// async fn read
impl Mp4Header {
pub async fn read<R, C>(reader: &mut R, _cache: Option<C>) -> Result<Self, Error>
where
R: AsyncRead + Unpin,
// C: AsyncRead + AsyncWrite + Unpin,
{
let mut ftyp = None;
let mut moov = None;
let mut moofs = Vec::new();
// let mut moof_offsets = Vec::new();
let mut emsgs = Vec::new();
let mut buff = Vec::with_capacity(1024);
while let Some(BoxHeader { kind, size: s }) = BoxHeader::read(reader).await? {
if buff.len() < s as usize {
buff.resize(s as usize, 0);
}
// Match and parse the atom boxes.
match kind {
BoxType::FtypBox => {
reader.read_exact(&mut buff[0..s as usize]).await?;
ftyp = Some(FtypBox::read_block(&mut &buff[0..s as usize])?);
println!("{:?}", ftyp);
}
BoxType::MoovBox => {
reader.read_exact(&mut buff[0..s as usize]).await?;
println!("moov");
moov = Some(MoovBox::read_block(&mut &buff[0..s as usize])?);
}
// BoxType::MoofBox => {
// let moof_offset = reader.stream_position()? - 8;
// let moof = MoofBox::read_box(reader, s)?;
// moofs.push(moof);
// moof_offsets.push(moof_offset);
// }
// BoxType::EmsgBox => {
// let emsg = EmsgBox::read_box(reader, s)?;
// emsgs.push(emsg);
// }
// BoxType::MdatBox => {
// skip_box(reader, s)?;
// }
// BoxType::FreeBox => {
// reader.read_exact(buf)
// skip_box(reader, s)?;
// }
bt => {
println!("skip {:?}", bt);
let mut buff = [0u8; 1024];
let mut read = 0;
for chunk in (0..s).step_by(1024) {
if chunk == 0 {
continue;
}
reader.read_exact(&mut buff).await?;
read += buff.len();
}
if s as usize - read > 0 {
reader.read_exact(&mut buff[0..s as usize - read]).await?;
}
}
}
}
if ftyp.is_none() {
return Err(Error::BoxNotFound(BoxType::FtypBox));
}
if moov.is_none() {
return Err(Error::BoxNotFound(BoxType::MoovBox));
}
let mut tracks = if let Some(ref moov) = moov {
if moov.traks.iter().any(|trak| trak.tkhd.track_id == 0) {
return Err(Error::InvalidData("illegal track id 0"));
}
moov.traks
.iter()
.map(|trak| (trak.tkhd.track_id, Mp4Track::from(trak)))
.collect()
} else {
HashMap::new()
};
// Update tracks if any fragmented (moof) boxes are found.
// if !moofs.is_empty() {
// let mut default_sample_duration = 0;
// if let Some(ref moov) = moov {
// if let Some(ref mvex) = &moov.mvex {
// default_sample_duration = mvex.trex.default_sample_duration
// }
// }
// for (moof, moof_offset) in moofs.iter().zip(moof_offsets) {
// for traf in moof.trafs.iter() {
// let track_id = traf.tfhd.track_id;
// if let Some(track) = tracks.get_mut(&track_id) {
// track.default_sample_duration = default_sample_duration;
// track.moof_offsets.push(moof_offset);
// track.trafs.push(traf.clone())
// } else {
// return Err(Error::TrakNotFound(track_id));
// }
// }
// }
// }
Ok(Mp4Header {
ftyp,
moov,
moofs,
emsgs,
tracks,
})
}
#[inline]
pub fn major_brand(&self) -> Option<&FourCC> {
Some(&self.ftyp.as_ref()?.major_brand)
}
pub fn minor_version(&self) -> Option<u32> {
Some(self.ftyp.as_ref()?.minor_version)
}
pub fn compatible_brands(&self) -> Option<&[FourCC]> {
Some(&self.ftyp.as_ref()?.compatible_brands)
}
pub fn duration(&self) -> Option<Duration> {
self.moov.as_ref().map(|moov| {
Duration::from_millis(moov.mvhd.duration * 1000 / moov.mvhd.timescale as u64)
})
}
pub fn timescale(&self) -> Option<u32> {
Some(self.moov.as_ref()?.mvhd.timescale)
}
pub fn is_fragmented(&self) -> bool {
!self.moofs.is_empty()
}
pub fn tracks(&self) -> &HashMap<u32, Mp4Track> {
&self.tracks
}
pub fn sample_count(&self, track_id: u32) -> Result<u32, Error> {
if let Some(track) = self.tracks.get(&track_id) {
Ok(track.sample_count())
} else {
Err(Error::TrakNotFound(track_id))
}
}
pub fn read_sample<R: Read + Seek>(
&mut self,
reader: &mut R,
track_id: u32,
sample_id: u32,
) -> Result<Option<Mp4Sample>, Error> {
if let Some(track) = self.tracks.get(&track_id) {
track.read_sample(reader, sample_id)
} else {
Err(Error::TrakNotFound(track_id))
}
}
pub fn sample_offset(&mut self, track_id: u32, sample_id: u32) -> Result<u64, Error> {
if let Some(track) = self.tracks.get(&track_id) {
track.sample_offset(sample_id)
} else {
Err(Error::TrakNotFound(track_id))
}
}
pub fn metadata(&self) -> Option<impl Metadata<'_>> {
self.moov.as_ref()?.udta.as_ref().and_then(|udta| {
udta.meta.as_ref().and_then(|meta| match meta {
MetaBox::Mdir { ilst } => ilst.as_ref(),
_ => None,
})
})
}
}
#[derive(Debug)]
pub struct AsyncMp4Reader<R> {
pub header: Mp4Header,
reader: R,
}
impl<R: AsyncRead + Unpin> AsyncMp4Reader<R> {
pub fn from_reader(reader: R, header: Mp4Header) -> Self {
Self { reader, header }
}
pub async fn read_header(mut reader: R) -> Result<Self, Error> {
Ok(AsyncMp4Reader {
header: Mp4Header::read(&mut reader, Some(())).await?,
reader,
})
}
// pub fn read_fragment_header<FR: Read + Seek>(
// &self,
// mut reader: FR,
// size: u64,
// ) -> Result<Mp4Reader<FR>> {
// Ok(Mp4Reader {
// header: self.header.read_fragment(&mut reader, size)?,
// reader,
// })
// }
// pub fn size(&self) -> u64 {
// self.header.size()
// }
pub fn major_brand(&self) -> Option<&FourCC> {
self.header.major_brand()
}
pub fn minor_version(&self) -> Option<u32> {
self.header.minor_version()
}
pub fn compatible_brands(&self) -> Option<&[FourCC]> {
self.header.compatible_brands()
}
pub fn duration(&self) -> Option<Duration> {
self.header.duration()
}
pub fn timescale(&self) -> Option<u32> {
self.header.timescale()
}
pub fn is_fragmented(&self) -> bool {
self.header.is_fragmented()
}
pub fn tracks(&self) -> &HashMap<u32, Mp4Track> {
self.header.tracks()
}
pub fn sample_count(&self, track_id: u32) -> Result<u32, Error> {
self.header.sample_count(track_id)
}
pub fn read_sample(
&mut self,
track_id: u32,
sample_id: u32,
) -> Result<Option<Mp4Sample>, Error> {
self.header
.read_sample(&mut self.reader, track_id, sample_id)
}
pub fn sample_offset(&mut self, track_id: u32, sample_id: u32) -> Result<u64, Error> {
self.header.sample_offset(track_id, sample_id)
}
}
pub struct Mp4Track {}
impl<R> AsyncMp4Reader<R> {
pub fn metadata(&self) -> impl Metadata<'_> {
self.header.metadata()
}
}

View file

@@ -38,6 +38,15 @@ pub enum BoxError {
TrakNotFound(u32),
}
#[derive(thiserror::Error, Debug)]
pub enum MemoryStorageError {
#[error("IoError: {0}")]
IoError(#[from] std::io::Error),
#[error("data buffer with index {0} not found")]
DataBufferNotFound(usize),
}
#[derive(Error, Debug)]
pub enum Error<E> {
#[error("{0}")]
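
With MemoryStorageError now declared in the public error module rather than next to MemoryStorage itself (the following hunk drops the old definition), downstream code reaches it as mp4::error::MemoryStorageError. A minimal, hypothetical match over its two variants:

use mp4::error::MemoryStorageError;

// Hypothetical helper: render the relocated error type for logging.
fn describe(err: &MemoryStorageError) -> String {
    match err {
        MemoryStorageError::IoError(e) => format!("I/O failure: {e}"),
        MemoryStorageError::DataBufferNotFound(idx) => format!("data buffer #{idx} not found"),
    }
}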

View file

@@ -1,18 +1,13 @@
use bytes::Bytes;
use futures::Future;
use std::collections::{BTreeSet, HashMap};
use std::iter::FromIterator;
use std::ops::Range;
use std::sync::Arc;
use tokio::io::{AsyncRead, AsyncReadExt};
use tokio::io::{AsyncRead, AsyncReadExt, AsyncSeek, AsyncSeekExt, SeekFrom};
use crate::ctts::CttsEntry;
use crate::error::BoxError;
use crate::stsc::StscEntry;
use crate::stts::SttsEntry;
use crate::{
BlockReader, BoxHeader, BoxType, EmsgBox, Error, FtypBox, MoofBox, MoovBox, Mp4Sample,
TrackType,
};
use crate::error::{BoxError, MemoryStorageError};
use crate::Mp4Track;
use crate::{BlockReader, BoxHeader, BoxType, EmsgBox, FtypBox, MoofBox, MoovBox};
pub trait DataStorage {
type Error;
@@ -30,15 +25,6 @@ pub trait DataStorage {
) -> impl Future<Output = Result<Bytes, Self::Error>>;
}
#[derive(thiserror::Error, Debug)]
pub enum MemoryStorageError {
#[error("IoError: {0}")]
IoError(#[from] std::io::Error),
#[error("data buffer with index {0} not found")]
DataBufferNotFound(usize),
}
#[derive(Default)]
pub struct MemoryStorage {
pub data: Vec<Bytes>,
@@ -72,36 +58,33 @@ impl DataStorage for MemoryStorage {
}
}
enum DataBlockBody<I> {
enum DataBlockBody {
Memory(Bytes),
Storage(I),
Reader,
}
pub struct DataBlock<I> {
pub struct DataBlock {
kind: BoxType,
offset: u64,
size: u64,
buffer: DataBlockBody<I>,
buffer: DataBlockBody,
}
pub struct Mp4File<'a, R, S = MemoryStorage>
pub struct Mp4File<'a, R>
where
R: AsyncRead + Unpin,
S: DataStorage,
R: AsyncRead + AsyncSeek + Unpin,
{
pub ftyp: Option<FtypBox>,
pub emsgs: Vec<EmsgBox>,
pub tracks: HashMap<u32, Mp4Track>,
pub reader: &'a mut R,
pub offsets: BTreeSet<u64>,
pub data_blocks: Vec<DataBlock<S::Id>>,
pub data_storage: S,
pub data_blocks: Vec<DataBlock>,
}
impl<'a, R> Mp4File<'a, R, MemoryStorage>
impl<'a, R> Mp4File<'a, R>
where
R: AsyncRead + Unpin + 'a,
R: AsyncRead + Unpin + AsyncSeek + 'a,
{
pub fn new(reader: &'a mut R) -> Self {
Self {
@@ -111,29 +94,15 @@ where
reader,
offsets: BTreeSet::new(),
data_blocks: Vec::new(),
data_storage: MemoryStorage::default(),
}
}
}
impl<'a, R, S> Mp4File<'a, R, S>
impl<'a, R> Mp4File<'a, R>
where
R: AsyncRead + Unpin + 'a,
S: DataStorage,
R: AsyncRead + Unpin + AsyncSeek + 'a,
{
pub fn with_storage(reader: &'a mut R, data_storage: S) -> Self {
Self {
ftyp: None,
emsgs: Vec::new(),
tracks: HashMap::new(),
reader,
offsets: BTreeSet::new(),
data_blocks: Vec::new(),
data_storage,
}
}
pub async fn read_header(&mut self) -> Result<bool, Error<S::Error>> {
pub async fn read_header(&mut self) -> Result<bool, BoxError> {
let mut buff = Vec::with_capacity(8192);
let mut got_moov = false;
let mut offset = 0u64;
@@ -148,7 +117,6 @@ where
if buff.len() < s as usize {
buff.resize(s as usize, 0);
}
self.reader.read_exact(&mut buff[0..s as usize]).await?;
offset += s;
@@ -217,24 +185,18 @@ where
Ok(got_moov)
}
async fn skip_box(&mut self, bt: BoxType, size: u64) -> Result<(), Error<S::Error>> {
async fn skip_box(&mut self, bt: BoxType, size: u64) -> Result<(), BoxError> {
println!("skip {:?}", bt);
tokio::io::copy(&mut (&mut self.reader).take(size), &mut tokio::io::empty()).await?;
Ok(())
}
async fn save_box(
&mut self,
kind: BoxType,
size: u64,
offset: u64,
) -> Result<(), Error<S::Error>> {
async fn save_box(&mut self, kind: BoxType, size: u64, offset: u64) -> Result<(), BoxError> {
println!("data_block {:?} {} - {}", kind, offset, offset + size);
if size < 128 * 1024 * 1024 {
let mut buffer = Vec::new();
tokio::io::copy(&mut self.reader.take(size), &mut buffer).await?;
self.data_blocks.push(DataBlock {
kind,
offset,
@@ -242,24 +204,18 @@ where
buffer: DataBlockBody::Memory(buffer.into()),
});
} else {
let id = self
.data_storage
.save_data(&mut self.reader.take(size))
.await
.map_err(Error::DataStorageError)?;
self.data_blocks.push(DataBlock {
kind,
offset,
size,
buffer: DataBlockBody::Storage(id),
buffer: DataBlockBody::Reader,
});
}
Ok(())
}
fn set_moov(&mut self, moov: MoovBox) -> Result<(), Error<S::Error>> {
fn set_moov(&mut self, moov: MoovBox) -> Result<(), BoxError> {
for trak in moov.traks {
self.tracks
.insert(trak.tkhd.track_id, Mp4Track::new(trak, &mut self.offsets)?);
@@ -268,7 +224,7 @@ where
Ok(())
}
fn add_moof(&mut self, offset: u64, moof: MoofBox) -> Result<(), Error<S::Error>> {
fn add_moof(&mut self, offset: u64, moof: MoofBox) -> Result<(), BoxError> {
for traf in moof.trafs {
let track_id = traf.tfhd.track_id;
@@ -284,10 +240,10 @@ where
#[inline]
pub async fn read_sample_data(
&self,
&mut self,
track_id: u32,
sample_idx: usize,
) -> Result<Option<Bytes>, Error<S::Error>> {
) -> Result<Option<Bytes>, BoxError> {
let Some(track) = self.tracks.get(&track_id) else {
return Ok(None);
};
@@ -300,19 +256,17 @@ where
let range = block.offset..block.offset + block.size;
if range.contains(&sample.offset) {
let offset = sample.offset - block.offset;
return Ok(Some(match &block.buffer {
DataBlockBody::Storage(id) => self
.data_storage
.read_data(id, offset..offset + sample.size as u64)
.await
.map_err(Error::DataStorageError)?,
DataBlockBody::Memory(mem) => {
let offset = sample.offset - block.offset;
mem.slice(offset as usize..offset as usize + sample.size as usize)
}
DataBlockBody::Reader => todo!(),
DataBlockBody::Reader => {
let mut buff = vec![0u8; sample.size as _];
self.reader.seek(SeekFrom::Start(sample.offset)).await?;
self.reader.read_exact(&mut buff).await?;
Bytes::from_iter(buff)
}
}));
}
}
@@ -320,359 +274,45 @@ where
Ok(None)
}
pub fn into_streams<T: AsRef<[u32]>>(
self,
tracks: T,
) -> impl Iterator<
Item = (
u32,
impl futures::Stream<Item = Result<Mp4Sample, Error<S::Error>>> + 'a,
),
>
where
S::Error: 'a,
{
let storage = Arc::new(self.data_storage);
let data_blocks = Arc::new(self.data_blocks);
// pub fn into_streams<T: AsRef<[u32]>>(
// self,
// tracks: T,
// ) -> impl Iterator<
// Item = (
// u32,
// impl futures::Stream<Item = Result<Mp4Sample, Error<S::Error>>> + 'a,
// ),
// >
// where
// S::Error: 'a,
// {
// let storage = Arc::new(self.data_storage);
// let data_blocks = Arc::new(self.data_blocks);
self.tracks
.into_iter()
.filter_map(move |(track_id, track)| {
if !tracks.as_ref().contains(&track_id) {
return None;
}
// self.tracks
// .into_iter()
// .filter_map(move |(track_id, track)| {
// if !tracks.as_ref().contains(&track_id) {
// return None;
// }
let storage = storage.clone();
let data_blocks = data_blocks.clone();
// let storage = storage.clone();
// let data_blocks = data_blocks.clone();
Some((
track_id,
async_stream::stream! {
for samp_offset in track.samples {
yield Ok(Mp4Sample {
start_time: samp_offset.start_time,
duration: samp_offset.duration,
rendering_offset: samp_offset.rendering_offset,
is_sync: samp_offset.is_sync,
bytes: Bytes::new(),
})
}
},
))
})
}
}
pub struct Mp4SampleOffset {
pub offset: u64,
pub size: u32,
pub duration: u32,
pub start_time: u64,
pub rendering_offset: i32,
pub is_sync: bool,
pub chunk_id: u32,
}
pub struct Mp4Track {
pub track_id: u32,
pub duration: u64,
pub samples: Vec<Mp4SampleOffset>,
pub tkhd: crate::TkhdBox,
pub mdia: crate::MdiaBox,
}
impl Mp4Track {
fn new(trak: crate::TrakBox, offsets: &mut BTreeSet<u64>) -> Result<Mp4Track, BoxError> {
let default_sample_duration = 1024;
let mut total_duration = 0;
let mut samples = Vec::with_capacity(trak.mdia.minf.stbl.stsz.sample_count as _);
let stco = &trak.mdia.minf.stbl.stco;
let co64 = &trak.mdia.minf.stbl.co64;
let mb_iter1 = stco.clone().map(IntoIterator::into_iter);
let mb_iter2 = co64.clone().map(IntoIterator::into_iter);
if let Some(stco) = co64.as_ref().map(IntoIterator::into_iter) {
offsets.extend(stco);
}
if let Some(stco) = stco.as_ref().map(IntoIterator::into_iter) {
offsets.extend(stco);
}
let chunk_iter = chunk_iter(
trak.mdia.minf.stbl.stsc.entries.clone().into_iter(),
mb_iter1
.into_iter()
.flatten()
.chain(mb_iter2.into_iter().flatten()),
);
let mut sample_chunk_iter = run_len_iter(chunk_iter);
let sync_iter_peek = trak
.mdia
.minf
.stbl
.stss
.as_ref()
.map(|x| x.entries.iter().copied().peekable());
let mut sync_iter =
(1..=trak.mdia.minf.stbl.stsz.sample_count).scan(sync_iter_peek, |iter, idx| {
let iter = iter.as_mut()?;
Some(if idx == iter.peek().copied().unwrap_or(u32::MAX) {
iter.next();
true
} else {
false
})
});
let mut ts_deltas =
run_len_iter(trak.mdia.minf.stbl.stts.entries.clone().into_iter().chain(
std::iter::once(SttsEntry {
sample_count: u32::MAX,
sample_delta: default_sample_duration,
}),
))
.scan(0u64, |s, delta| {
let out = *s;
*s += delta as u64;
Some((out, delta))
});
let mut rend_offset_iter = run_len_iter(
trak.mdia
.minf
.stbl
.ctts
.clone()
.into_iter()
.flat_map(|x| x.entries.into_iter()),
);
let mut sample_offset = 0;
let mut curr_chunk_index = 0;
let mut prev_size = 0;
for sample_idx in 0..trak.mdia.minf.stbl.stsz.sample_count as usize {
let (start_time, duration) = ts_deltas.next().unwrap();
let chunk = sample_chunk_iter.next().unwrap();
let size = *trak
.mdia
.minf
.stbl
.stsz
.sample_sizes
.get(sample_idx)
.unwrap_or(&trak.mdia.minf.stbl.stsz.sample_size);
if curr_chunk_index != chunk.index {
curr_chunk_index = chunk.index;
sample_offset = 0;
} else {
sample_offset += prev_size;
}
prev_size = size;
total_duration = start_time + duration as u64;
samples.push(Mp4SampleOffset {
chunk_id: chunk.index,
offset: chunk.offset + sample_offset as u64,
size,
duration,
start_time,
rendering_offset: rend_offset_iter.next().unwrap_or(0),
is_sync: sync_iter.next().unwrap_or(true),
})
}
Ok(Self {
track_id: trak.tkhd.track_id,
tkhd: trak.tkhd,
mdia: trak.mdia,
samples,
duration: total_duration,
})
}
#[inline]
pub fn track_type(&self) -> TrackType {
TrackType::from(&self.mdia.hdlr.handler_type)
}
pub(crate) fn add_traf(
&mut self,
base_moof_offset: u64,
chunk_index: u32,
traf: crate::TrafBox,
offsets: &mut BTreeSet<u64>,
) {
let base_data_offset = traf.tfhd.base_data_offset.unwrap_or(base_moof_offset);
offsets.insert(base_data_offset);
let default_sample_size = traf.tfhd.default_sample_size.unwrap_or(0);
let default_sample_duration = traf.tfhd.default_sample_duration.unwrap_or(0);
let base_start_time = traf
.tfdt
.map(|x| x.base_media_decode_time)
.or_else(|| {
self.samples
.last()
.map(|x| x.start_time + x.duration as u64)
})
.unwrap_or(0);
let Some(trun) = traf.trun else {
return;
};
let mut sample_offset = 0u64;
let mut start_time_offset = 0u64;
for sample_idx in 0..trun.sample_count as usize {
let size = trun
.sample_sizes
.get(sample_idx)
.copied()
.unwrap_or(default_sample_size);
let duration = trun
.sample_durations
.get(sample_idx)
.copied()
.unwrap_or(default_sample_duration);
let rendering_offset = trun.sample_cts.get(sample_idx).copied().unwrap_or(0) as i32;
self.samples.push(Mp4SampleOffset {
chunk_id: chunk_index,
offset: (base_data_offset as i64
+ trun.data_offset.map(|x| x as i64).unwrap_or(0)
+ sample_offset as i64) as u64,
size,
duration,
start_time: base_start_time + start_time_offset,
rendering_offset,
is_sync: sample_idx == 0,
});
sample_offset += size as u64;
start_time_offset += duration as u64;
}
}
}
trait RunLenghtItem {
type Value: Clone;
fn count(&self) -> usize;
fn value(&self) -> Self::Value;
}
impl<T: Clone> RunLenghtItem for (usize, T) {
type Value = T;
fn count(&self) -> usize {
self.0
}
fn value(&self) -> Self::Value {
self.1.clone()
}
}
impl RunLenghtItem for CttsEntry {
type Value = i32;
fn count(&self) -> usize {
self.sample_count as _
}
fn value(&self) -> Self::Value {
self.sample_offset
}
}
impl RunLenghtItem for SttsEntry {
type Value = u32;
fn count(&self) -> usize {
self.sample_count as _
}
fn value(&self) -> Self::Value {
self.sample_delta
}
}
fn run_len_iter<E: RunLenghtItem, I: IntoIterator<Item = E>>(
iter: I,
) -> impl Iterator<Item = E::Value> {
let mut iter = iter.into_iter();
let mut value = None::<E::Value>;
let mut repeat = 0;
std::iter::from_fn(move || loop {
if let Some(val) = &value {
if repeat > 0 {
repeat -= 1;
return Some(val.clone());
} else {
value = None;
}
}
let x = iter.next()?;
value = Some(x.value());
repeat = x.count();
})
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Chunk {
pub index: u32,
pub offset: u64,
pub samples_per_chunk: u32,
pub sample_description_index: u32,
}
impl RunLenghtItem for Chunk {
type Value = Chunk;
fn count(&self) -> usize {
self.samples_per_chunk as _
}
fn value(&self) -> Self::Value {
*self
}
}
fn chunk_iter(
mut stsc: impl Iterator<Item = StscEntry>,
stco: impl Iterator<Item = u64>,
) -> impl Iterator<Item = Chunk> {
let mut prev = stsc.next().unwrap_or(StscEntry {
first_chunk: 1,
samples_per_chunk: u32::MAX,
sample_description_index: 1,
first_sample: 1,
});
let mut curr = stsc.next();
stco.enumerate().map(move |(idx, offset)| {
if let Some(c) = &curr {
if idx + 1 >= c.first_chunk as usize {
prev = *c;
curr = stsc.next();
}
}
Chunk {
index: idx as _,
offset,
samples_per_chunk: prev.samples_per_chunk,
sample_description_index: prev.sample_description_index,
}
})
// Some((
// track_id,
// async_stream::stream! {
// for samp_offset in track.samples {
// yield Ok(Mp4Sample {
// start_time: samp_offset.start_time,
// duration: samp_offset.duration,
// rendering_offset: samp_offset.rendering_offset,
// is_sync: samp_offset.is_sync,
// bytes: Bytes::new(),
// })
// }
// },
// ))
// })
// }
}
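
Taken together, this hunk removes the pluggable DataStorage type parameter from Mp4File: the struct is now generic only over an AsyncRead + AsyncSeek reader, boxes larger than 128 MiB are left in place and re-read by seeking that same reader (DataBlockBody::Reader), and the error type collapses to BoxError. A minimal end-to-end sketch against the simplified API, assuming a local file named video.mp4 and a tokio build with the macros, rt, fs, and io-util features:

use tokio::{fs::File, io::BufReader};

#[tokio::main(flavor = "current_thread")]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut reader = BufReader::new(File::open("video.mp4").await?);
    let mut mp4_file = mp4::Mp4File::new(&mut reader);
    // read_header parses the top-level boxes and records data-block offsets;
    // per the hunk above it reports whether a moov box was found.
    let got_moov = mp4_file.read_header().await?;
    println!("moov present: {got_moov}");
    let track_ids: Vec<u32> = mp4_file.tracks.keys().copied().collect();
    for track_id in track_ids {
        if let Some(data) = mp4_file.read_sample_data(track_id, 0).await? {
            println!("track {track_id}: first sample is {} bytes", data.len());
        }
    }
    Ok(())
}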

View file

@@ -1,139 +0,0 @@
use std::collections::HashMap;
use tokio::io::{AsyncRead, AsyncReadExt};
use crate::{BlockReader, BoxHeader, BoxType, EmsgBox, Error, FtypBox, MoofBox, MoovBox, Mp4Track};
#[derive(Debug, Clone)]
pub struct Mp4Header {
pub ftyp: Option<FtypBox>,
pub moov: Option<MoovBox>,
pub moofs: Vec<MoofBox>,
pub emsgs: Vec<EmsgBox>,
pub data: Vec<(u64, u64)>,
}
impl Mp4Header {
pub async fn read_until_mdat<R, C>(reader: &mut R) -> Result<Self, Error>
where
R: AsyncRead + Unpin,
{
let mut offset = 0;
let mut ftyp = None;
let mut moov = None;
let mut moofs = Vec::new();
// let mut moof_offsets = Vec::new();
let mut emsgs = Vec::new();
let mut buff = Vec::with_capacity(8192);
while let Some(BoxHeader { kind, size: s }) = BoxHeader::read(reader).await? {
if buff.len() < s as usize {
buff.resize(s as usize, 0);
}
// Match and parse the atom boxes.
match kind {
BoxType::FtypBox => {
reader.read_exact(&mut buff[0..s as usize]).await?;
ftyp = Some(FtypBox::read_block(&mut &buff[0..s as usize])?);
}
BoxType::MoovBox => {
reader.read_exact(&mut buff[0..s as usize]).await?;
moov = Some(MoovBox::read_block(&mut &buff[0..s as usize])?);
}
BoxType::MoofBox => {
let moof_offset = reader.stream_position()? - 8;
let moof = MoofBox::read_box(reader, s)?;
moofs.push(moof);
moof_offsets.push(moof_offset);
}
BoxType::EmsgBox => {
let emsg = EmsgBox::read_box(reader, s)?;
emsgs.push(emsg);
}
BoxType::MdatBox => {}
// BoxType::FreeBox => {
// reader.read_exact(buf)
// skip_box(reader, s)?;
// }
bt => {
println!("skip {:?}", bt);
let mut buff = [0u8; 1024];
let mut read = 0;
for chunk in (0..s).step_by(1024) {
if chunk == 0 {
continue;
}
reader.read_exact(&mut buff).await?;
read += buff.len();
}
if s as usize - read > 0 {
reader.read_exact(&mut buff[0..s as usize - read]).await?;
}
}
}
}
if ftyp.is_none() {
return Err(Error::BoxNotFound(BoxType::FtypBox));
}
if moov.is_none() {
return Err(Error::BoxNotFound(BoxType::MoovBox));
}
let mut tracks = if let Some(ref moov) = moov {
if moov.traks.iter().any(|trak| trak.tkhd.track_id == 0) {
return Err(Error::InvalidData("illegal track id 0"));
}
moov.traks
.iter()
.map(|trak| (trak.tkhd.track_id, Mp4Track::from(trak)))
.collect()
} else {
HashMap::new()
};
// Update tracks if any fragmented (moof) boxes are found.
// if !moofs.is_empty() {
// let mut default_sample_duration = 0;
// if let Some(ref moov) = moov {
// if let Some(ref mvex) = &moov.mvex {
// default_sample_duration = mvex.trex.default_sample_duration
// }
// }
// for (moof, moof_offset) in moofs.iter().zip(moof_offsets) {
// for traf in moof.trafs.iter() {
// let track_id = traf.tfhd.track_id;
// if let Some(track) = tracks.get_mut(&track_id) {
// track.default_sample_duration = default_sample_duration;
// track.moof_offsets.push(moof_offset);
// track.trafs.push(traf.clone())
// } else {
// return Err(Error::TrakNotFound(track_id));
// }
// }
// }
// }
Ok(Mp4Header {
ftyp,
moov,
moofs,
emsgs,
tracks,
})
}
pub fn can_be_streamed(&self) -> bool {
self.moov.is_some()
}
}

View file

@@ -65,7 +65,7 @@
//! [examples]: https://github.com/alfg/mp4-rust/tree/master/examples
#![doc(html_root_url = "https://docs.rs/mp4/*")]
mod error;
pub mod error;
use error::BoxError;
pub use error::Error;
@@ -78,19 +78,14 @@ mod mp4box;
pub use mp4box::*;
mod file;
// mod header;
mod stream;
mod track;
pub use track::{Mp4Track, TrackConfig};
pub use track::Mp4Track;
pub use file::*;
// mod async_reader;
// pub use async_reader::{AsyncMp4Reader, Mp4Header};
mod writer;
pub use writer::{Mp4Config, Mp4Writer};
// pub async fn read_mp4(f: File) -> Result<Mp4Reader<BufReader<File>>> {
// let size = f.metadata()?.len();
// let reader = BufReader::new(f);

View file

@@ -1,361 +0,0 @@
use std::collections::HashMap;
use std::io::{Read, Seek};
use std::time::Duration;
use crate::meta::MetaBox;
use crate::*;
#[derive(Debug)]
pub struct Mp4Header {
pub ftyp: FtypBox,
pub moov: MoovBox,
pub moofs: Vec<MoofBox>,
pub emsgs: Vec<EmsgBox>,
tracks: HashMap<u32, Mp4Track>,
size: u64,
}
impl Mp4Header {
pub fn read<R: Read + Seek>(reader: &mut R, size: u64) -> Result<Self> {
let start = reader.stream_position()?;
let mut ftyp = None;
let mut moov = None;
let mut moofs = Vec::new();
let mut moof_offsets = Vec::new();
let mut emsgs = Vec::new();
let mut current = start;
while current < size {
// Get box header.
let header = BoxHeader::read(reader)?;
let BoxHeader { name, size: s } = header;
if s > size {
break;
}
// Break if size zero BoxHeader, which can result in dead-loop.
if s == 0 {
break;
}
// Match and parse the atom boxes.
match name {
BoxType::FtypBox => {
ftyp = Some(FtypBox::read_box(reader, s)?);
}
BoxType::FreeBox => {
skip_box(reader, s)?;
}
BoxType::MdatBox => {
skip_box(reader, s)?;
}
BoxType::MoovBox => {
moov = Some(MoovBox::read_box(reader, s)?);
}
BoxType::MoofBox => {
let moof_offset = reader.stream_position()? - 8;
let moof = MoofBox::read_box(reader, s)?;
moofs.push(moof);
moof_offsets.push(moof_offset);
}
BoxType::EmsgBox => {
let emsg = EmsgBox::read_box(reader, s)?;
emsgs.push(emsg);
}
_ => {
// XXX warn!()
skip_box(reader, s)?;
}
}
current = reader.stream_position()?;
}
if ftyp.is_none() {
return Err(Error::BoxNotFound(BoxType::FtypBox));
}
if moov.is_none() {
return Err(Error::BoxNotFound(BoxType::MoovBox));
}
let size = current - start;
let mut tracks = if let Some(ref moov) = moov {
if moov.traks.iter().any(|trak| trak.tkhd.track_id == 0) {
return Err(Error::InvalidData("illegal track id 0"));
}
moov.traks
.iter()
.map(|trak| (trak.tkhd.track_id, Mp4Track::from(trak)))
.collect()
} else {
HashMap::new()
};
// Update tracks if any fragmented (moof) boxes are found.
if !moofs.is_empty() {
let mut default_sample_duration = 0;
if let Some(ref moov) = moov {
if let Some(ref mvex) = &moov.mvex {
default_sample_duration = mvex.trex.default_sample_duration
}
}
for (moof, moof_offset) in moofs.iter().zip(moof_offsets) {
for traf in moof.trafs.iter() {
let track_id = traf.tfhd.track_id;
if let Some(track) = tracks.get_mut(&track_id) {
track.default_sample_duration = default_sample_duration;
track.moof_offsets.push(moof_offset);
track.trafs.push(traf.clone())
} else {
return Err(Error::TrakNotFound(track_id));
}
}
}
}
Ok(Mp4Header {
ftyp: ftyp.unwrap(),
moov: moov.unwrap(),
moofs,
emsgs,
size,
tracks,
})
}
pub fn read_fragment<R: Read + Seek>(&self, reader: &mut R, size: u64) -> Result<Self> {
let start = reader.stream_position()?;
let mut moofs = Vec::new();
let mut moof_offsets = Vec::new();
let mut current = start;
while current < size {
// Get box header.
let header = BoxHeader::read(reader)?;
let BoxHeader { name, size: s } = header;
if s > size {
return Err(Error::InvalidData(
"file contains a box with a larger size than it",
));
}
// Break if size zero BoxHeader, which can result in dead-loop.
if s == 0 {
break;
}
// Match and parse the atom boxes.
match name {
BoxType::MdatBox => {
skip_box(reader, s)?;
}
BoxType::MoofBox => {
let moof_offset = reader.stream_position()? - 8;
let moof = MoofBox::read_box(reader, s)?;
moofs.push(moof);
moof_offsets.push(moof_offset);
}
_ => {
// XXX warn!()
skip_box(reader, s)?;
}
}
current = reader.stream_position()?;
}
if moofs.is_empty() {
return Err(Error::BoxNotFound(BoxType::MoofBox));
}
let size = current - start;
let mut tracks: HashMap<u32, Mp4Track> = self
.moov
.traks
.iter()
.map(|trak| (trak.tkhd.track_id, Mp4Track::from(trak)))
.collect();
let mut default_sample_duration = 0;
if let Some(ref mvex) = &self.moov.mvex {
default_sample_duration = mvex.trex.default_sample_duration
}
for (moof, moof_offset) in moofs.iter().zip(moof_offsets) {
for traf in moof.trafs.iter() {
let track_id = traf.tfhd.track_id;
if let Some(track) = tracks.get_mut(&track_id) {
track.default_sample_duration = default_sample_duration;
track.moof_offsets.push(moof_offset);
track.trafs.push(traf.clone())
} else {
return Err(Error::TrakNotFound(track_id));
}
}
}
Ok(Mp4Header {
ftyp: self.ftyp.clone(),
moov: self.moov.clone(),
moofs,
emsgs: Vec::new(),
tracks,
size,
})
}
#[inline]
pub fn size(&self) -> u64 {
self.size
}
#[inline]
pub fn major_brand(&self) -> &FourCC {
&self.ftyp.major_brand
}
pub fn minor_version(&self) -> u32 {
self.ftyp.minor_version
}
pub fn compatible_brands(&self) -> &[FourCC] {
&self.ftyp.compatible_brands
}
pub fn duration(&self) -> Duration {
Duration::from_millis(self.moov.mvhd.duration * 1000 / self.moov.mvhd.timescale as u64)
}
pub fn timescale(&self) -> u32 {
self.moov.mvhd.timescale
}
pub fn is_fragmented(&self) -> bool {
!self.moofs.is_empty()
}
pub fn tracks(&self) -> &HashMap<u32, Mp4Track> {
&self.tracks
}
pub fn sample_count(&self, track_id: u32) -> Result<u32> {
if let Some(track) = self.tracks.get(&track_id) {
Ok(track.sample_count())
} else {
Err(Error::TrakNotFound(track_id))
}
}
pub fn read_sample<R: Read + Seek>(
&mut self,
reader: &mut R,
track_id: u32,
sample_id: u32,
) -> Result<Option<Mp4Sample>> {
if let Some(track) = self.tracks.get(&track_id) {
track.read_sample(reader, sample_id)
} else {
Err(Error::TrakNotFound(track_id))
}
}
pub fn sample_offset(&mut self, track_id: u32, sample_id: u32) -> Result<u64> {
if let Some(track) = self.tracks.get(&track_id) {
track.sample_offset(sample_id)
} else {
Err(Error::TrakNotFound(track_id))
}
}
pub fn metadata(&self) -> impl Metadata<'_> {
self.moov.udta.as_ref().and_then(|udta| {
udta.meta.as_ref().and_then(|meta| match meta {
MetaBox::Mdir { ilst } => ilst.as_ref(),
_ => None,
})
})
}
}
#[derive(Debug)]
pub struct Mp4Reader<R> {
reader: R,
pub header: Mp4Header,
}
impl<R: Read + Seek> Mp4Reader<R> {
pub fn from_reader(reader: R, header: Mp4Header) -> Self {
Self { reader, header }
}
pub fn read_header(mut reader: R, size: u64) -> Result<Self> {
Ok(Mp4Reader {
header: Mp4Header::read(&mut reader, size)?,
reader,
})
}
pub fn read_fragment_header<FR: Read + Seek>(
&self,
mut reader: FR,
size: u64,
) -> Result<Mp4Reader<FR>> {
Ok(Mp4Reader {
header: self.header.read_fragment(&mut reader, size)?,
reader,
})
}
pub fn size(&self) -> u64 {
self.header.size()
}
pub fn major_brand(&self) -> &FourCC {
self.header.major_brand()
}
pub fn minor_version(&self) -> u32 {
self.header.minor_version()
}
pub fn compatible_brands(&self) -> &[FourCC] {
self.header.compatible_brands()
}
pub fn duration(&self) -> Duration {
self.header.duration()
}
pub fn timescale(&self) -> u32 {
self.header.timescale()
}
pub fn is_fragmented(&self) -> bool {
self.header.is_fragmented()
}
pub fn tracks(&self) -> &HashMap<u32, Mp4Track> {
self.header.tracks()
}
pub fn sample_count(&self, track_id: u32) -> Result<u32> {
self.header.sample_count(track_id)
}
pub fn read_sample(&mut self, track_id: u32, sample_id: u32) -> Result<Option<Mp4Sample>> {
self.header
.read_sample(&mut self.reader, track_id, sample_id)
}
pub fn sample_offset(&mut self, track_id: u32, sample_id: u32) -> Result<u64> {
self.header.sample_offset(track_id, sample_id)
}
}
impl<R> Mp4Reader<R> {
pub fn metadata(&self) -> impl Metadata<'_> {
self.header.metadata()
}
}

View file

@@ -1,19 +0,0 @@
use std::{
pin::Pin,
task::{Context, Poll},
};
use futures::Stream;
use crate::Error;
pub struct Mp4Frame {}
pub struct Mp4Stream {}
// impl Stream for Mp4Stream {
// type Item = Result<Mp4Frame, Error>;
// fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
// todo!()
// }
// }

File diff suppressed because it is too large.

View file

@@ -1,149 +0,0 @@
use byteorder::{BigEndian, WriteBytesExt};
use std::io::{Seek, SeekFrom, Write};
use crate::mp4box::*;
use crate::track::Mp4TrackWriter;
use crate::*;
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Mp4Config {
pub major_brand: FourCC,
pub minor_version: u32,
pub compatible_brands: Vec<FourCC>,
pub timescale: u32,
}
#[derive(Debug)]
pub struct Mp4Writer<W> {
writer: W,
tracks: Vec<Mp4TrackWriter>,
mdat_pos: u64,
timescale: u32,
duration: u64,
}
impl<W> Mp4Writer<W> {
/// Consume self, returning the inner writer.
///
/// This can be useful to recover the inner writer after completion in case
/// it's owned by the [Mp4Writer] instance.
///
/// # Examples
///
/// ```rust
/// use mp4::{Mp4Writer, Mp4Config};
/// use std::io::Cursor;
///
/// # fn main() -> mp4::Result<()> {
/// let config = Mp4Config {
/// major_brand: str::parse("isom").unwrap(),
/// minor_version: 512,
/// compatible_brands: vec![
/// str::parse("isom").unwrap(),
/// str::parse("iso2").unwrap(),
/// str::parse("avc1").unwrap(),
/// str::parse("mp41").unwrap(),
/// ],
/// timescale: 1000,
/// };
///
/// let data = Cursor::new(Vec::<u8>::new());
/// let mut writer = mp4::Mp4Writer::write_start(data, &config)?;
/// writer.write_end()?;
///
/// let data: Vec<u8> = writer.into_writer().into_inner();
/// # Ok(()) }
/// ```
pub fn into_writer(self) -> W {
self.writer
}
}
impl<W: Write + Seek> Mp4Writer<W> {
pub fn write_start(mut writer: W, config: &Mp4Config) -> Result<Self> {
let ftyp = FtypBox {
major_brand: config.major_brand,
minor_version: config.minor_version,
compatible_brands: config.compatible_brands.clone(),
};
ftyp.write_box(&mut writer)?;
// TODO largesize
let mdat_pos = writer.stream_position()?;
BoxHeader::new(BoxType::MdatBox, HEADER_SIZE).write(&mut writer)?;
BoxHeader::new(BoxType::WideBox, HEADER_SIZE).write(&mut writer)?;
let tracks = Vec::new();
let timescale = config.timescale;
let duration = 0;
Ok(Self {
writer,
tracks,
mdat_pos,
timescale,
duration,
})
}
pub fn add_track(&mut self, config: &TrackConfig) -> Result<()> {
let track_id = self.tracks.len() as u32 + 1;
let track = Mp4TrackWriter::new(track_id, config)?;
self.tracks.push(track);
Ok(())
}
fn update_durations(&mut self, track_dur: u64) {
if track_dur > self.duration {
self.duration = track_dur;
}
}
pub fn write_sample(&mut self, track_id: u32, sample: &Mp4Sample) -> Result<()> {
if track_id == 0 {
return Err(BoxError::TrakNotFound(track_id));
}
let track_dur = if let Some(ref mut track) = self.tracks.get_mut(track_id as usize - 1) {
track.write_sample(&mut self.writer, sample, self.timescale)?
} else {
return Err(BoxError::TrakNotFound(track_id));
};
self.update_durations(track_dur);
Ok(())
}
fn update_mdat_size(&mut self) -> Result<()> {
let mdat_end = self.writer.stream_position()?;
let mdat_size = mdat_end - self.mdat_pos;
if mdat_size > std::u32::MAX as u64 {
self.writer.seek(SeekFrom::Start(self.mdat_pos))?;
self.writer.write_u32::<BigEndian>(1)?;
self.writer.seek(SeekFrom::Start(self.mdat_pos + 8))?;
self.writer.write_u64::<BigEndian>(mdat_size)?;
} else {
self.writer.seek(SeekFrom::Start(self.mdat_pos))?;
self.writer.write_u32::<BigEndian>(mdat_size as u32)?;
}
self.writer.seek(SeekFrom::Start(mdat_end))?;
Ok(())
}
pub fn write_end(&mut self) -> Result<()> {
let mut moov = MoovBox::default();
for track in self.tracks.iter_mut() {
moov.traks.push(track.write_end(&mut self.writer)?);
}
self.update_mdat_size()?;
moov.mvhd.timescale = self.timescale;
moov.mvhd.duration = self.duration;
if moov.mvhd.duration > (u32::MAX as u64) {
moov.mvhd.version = 1
}
moov.write_box(&mut self.writer)?;
Ok(())
}
}

View file

@@ -1,6 +1,4 @@
use mp4::{
AudioObjectType, AvcProfile, ChannelConfig, MediaType, Mp4Reader, SampleFreqIndex, TrackType,
};
use mp4::{AudioObjectType, AvcProfile, ChannelConfig, MediaType, SampleFreqIndex, TrackType};
use std::fs::{self, File};
use std::io::BufReader;
use std::time::Duration;
@@ -155,9 +153,11 @@ fn test_read_extended_audio_object_type() {
fn get_reader(path: &str) -> Mp4Reader<BufReader<File>> {
let f = File::open(path).unwrap();
let f_size = f.metadata().unwrap().len();
let reader = BufReader::new(f);
let mut reader = BufReader::new(f);
mp4::Mp4Reader::read_header(reader, f_size).unwrap()
let mp4_file = mp4::Mp4File::new(&mut reader);
mp4::Mp4File::read_header(reader, f_size).unwrap()
}
#[test]