I need some help accurately trimming video using the FFmpeg C API.


I need to accurately trim video using the FFmpeg C API. I'm seeing that when the input video stream has a time_base of 1/48000, the code correctly selects the trimmed start and end times. When the input video stream has a time_base other than 1/48000, the trim is incorrect.

Time bases of 1/30000 and 1/24000 trim half the expected video stream length — 10s instead of 20s. A time base of 1/25 trims nothing at all — the output file is only a few KB.

The audio stream appears to be trimmed correctly.

For example, if I try to trim the first 20s of a video whose video stream time base is 1/30000, the output MP4's length is 20s, but it contains only the first 10s of video alongside the first 20s of audio.

I think I'm incorrectly calculating end_time, but I'm not sure why it is correct only for streams with a 1/48000 time_base.

/*
 * Convert the requested trim window (start_time/end_time, in seconds) into
 * the INPUT stream's native time base.  The seconds value is first scaled to
 * AV_TIME_BASE units (microseconds), then rescaled from 1/AV_TIME_BASE into
 * in_stream->time_base with av_rescale_q().
 * NOTE(review): these thresholds are in in_stream->time_base, so any later
 * comparison against pkt.pts must happen BEFORE pkt.pts is rescaled to the
 * output stream's time base.
 */
record[i].start_time = av_rescale_q((int64_t)(start_time * AV_TIME_BASE), default_timebase, in_stream->time_base);
record[i].end_time   = av_rescale_q((int64_t)(end_time   * AV_TIME_BASE), default_timebase, in_stream->time_base);

Here is a more complete sample of the code:

int num_of_streams = ifmt_ctx->nb_streams;
if (num_of_streams > 0) {
    /* Tracks each stream's trimmed start and end times, expressed in that
     * stream's INPUT time base (in_stream->time_base). */
    struct stream_pts record[num_of_streams];

    /* Create one output stream per input stream and copy codec parameters. */
    for (i = 0; i < num_of_streams; i++) {
        AVStream *in_stream = ifmt_ctx->streams[i];
        AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
        if (!out_stream) {
            LOGE("=> Failed allocating output stream");
            ret = AVERROR_UNKNOWN;
            return close_connection(ret);
        }

        ret = avcodec_copy_context(out_stream->codec, in_stream->codec);
        if (ret < 0) {
            LOGE("=> Failed to copy context from input to output stream codec context");
            return close_connection(ret);
        }
        out_stream->codec->codec_tag = 0;
        if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
            out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;

        /* 1/AV_TIME_BASE == microsecond ticks, the unit of the scaled
         * start_time/end_time values below. */
        AVRational default_timebase;
        default_timebase.num = 1;
        default_timebase.den = AV_TIME_BASE;

        /* Determine start/end times for each stream, in the INPUT time base. */
        record[i].index = i;
        record[i].start_time = av_rescale_q((int64_t)(start_time * AV_TIME_BASE), default_timebase, in_stream->time_base);
        record[i].end_time   = av_rescale_q((int64_t)(end_time   * AV_TIME_BASE), default_timebase, in_stream->time_base);
    }

    av_dump_format(ofmt_ctx, 0, output_file, 1);

    if (!(ofmt->flags & AVFMT_NOFILE)) {
        ret = avio_open(&ofmt_ctx->pb, output_file, AVIO_FLAG_WRITE);
        if (ret < 0) {
            LOGE("=> Could not open output file '%s'", output_file);
            return close_connection(ret);
        }
    }

    /* NOTE: avformat_write_header() is allowed to CHANGE
     * out_stream->time_base (the MP4 muxer, for instance, rewrites the video
     * stream's time base).  After this call, out_stream->time_base and
     * in_stream->time_base generally differ. */
    ret = avformat_write_header(ofmt_ctx, NULL);
    if (ret < 0) {
        LOGE("=> Error occurred when opening output file");
        return close_connection(ret);
    }

    while (1) {
        AVStream *in_stream, *out_stream;

        ret = av_read_frame(ifmt_ctx, &pkt);
        if (ret < 0)
            break;

        in_stream  = ifmt_ctx->streams[pkt.stream_index];
        out_stream = ofmt_ctx->streams[pkt.stream_index];

        /* BUG FIX: evaluate the trim window while pkt.pts is still in the
         * INPUT stream's time base.  record[] was computed against
         * in_stream->time_base, but the original code compared it against a
         * pts already rescaled into out_stream->time_base.  The two bases
         * only coincide for streams the muxer leaves untouched (e.g. the
         * 1/48000 audio stream), which is exactly why only that case
         * trimmed correctly. */
        int keep = pkt.pts >= record[pkt.stream_index].start_time &&
                   pkt.pts <= record[pkt.stream_index].end_time;

        /* Now rescale packet timing into the output stream's time base. */
        pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base,
                                   AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX);
        pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base,
                                   AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX);
        pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
        pkt.pos = -1;

        /* Only write the frames inside the requested trim window. */
        if (keep) {
            ret = av_interleaved_write_frame(ofmt_ctx, &pkt);
            if (ret < 0) {
                LOGE("=> Error muxing packet");
                break;
            }
        }

        av_free_packet(&pkt);
    }
}


Comments

Popular posts from this blog

java - Could not locate OpenAL library -

c++ - Delete matches in OpenCV (Keypoints and descriptors) -

sorting - opencl Bitonic sort with 64 bits keys -