// built on example from https://github.com/vzhn/ffmpeg-java-samples/blob/master/src/main/java/DemuxAndDecodeH264.java

!include once #1035133 // Video Libs [Include]

import java.time.Duration;
import java.time.temporal.ChronoUnit;

import org.bytedeco.javacpp.*;
import static org.bytedeco.javacpp.avcodec.*;
import static org.bytedeco.javacpp.avformat.*;
import static org.bytedeco.javacpp.avutil.*;
import org.bytedeco.javacpp.presets.avutil;

sclass ImageStreamFromVideoFile is Steppable, AutoCloseable {
  // image: video frame as an image. The image is a reused buffer,
  // i.e. don't hold on to it after the listener returns.
  // d: position in the video
  event haveImage(BufferedImage image, Duration d);

  // set when no more frames are wanted
  settable bool stopped;

  /** input (e.g. Matroska) format context */
  private AVFormatContext avfmtCtx;

  /** video stream information */
  private AVStream videoStream;

  /** demuxed packet */
  private AVPacket avpacket;

  /** H264 decoder */
  private AVCodec codec;

  /** H264 decoder context */
  private AVCodecContext codecContext;

  /** decoded yuv420 frame */
  private AVFrame yuv420Frame;

  /** RGB frame (after color conversion) */
  private AVFrame rgbFrame;

  /** Java-side RGB image (reused buffer) */
  private BufferedImage img;

  /** yuv420-to-rgb converter */
  private swscale.SwsContext sws_ctx;

  /** number of frames decoded so far */
  settable long nFrames;

  // where we are in the video
  gettable Duration position;

  /** millisecond time base (1/1000 of a second) */
  private AVRational tb1000;

  Thread decodeThread;

  {
    tb1000 = new AVRational();
    tb1000.num(1);
    tb1000.den(1000);
  }

  void open(File videoFile) throws IOException {
    if (avfmtCtx != null) fail("Already opened a file");
    av_log_set_level(AV_LOG_VERBOSE);
    openInput(videoFile);
    findVideoStream();
    initDecoder();
    initRgbFrame();
    initYuv420Frame();
    getSwsContext();
    avpacket = new avcodec.AVPacket();
  }

  // 0 = demuxing, 1 = flushing delayed frames, 2 = done
  int stage;

  public bool step() ctex {
    if (stopped()) false;
    if (stage == 0) {
      if (av_read_frame(avfmtCtx, avpacket) >= 0) {
        if (avpacket.stream_index() == videoStream.index())
          processAVPacket(avpacket);
        av_packet_unref(avpacket);
      } else
        stage = 1; // end of file reached
      true;
    }
    if (stage == 1) {
      // process delayed frames
      processAVPacket(null);
      stage = 2;
      false;
    }
    false;
  }

  private AVFormatContext openInput(File videoFile) throws IOException {
    avfmtCtx = new AVFormatContext(null);
    BytePointer filePointer = new BytePointer(videoFile.getPath());
    int r = avformat.avformat_open_input(avfmtCtx, filePointer, null, null);
    filePointer.deallocate();
    if (r < 0) {
      avfmtCtx.close();
      throw new IOException("avformat_open_input error: " + r);
    }
    return avfmtCtx;
  }

  private void findVideoStream() throws IOException {
    int r = avformat_find_stream_info(avfmtCtx, (PointerPointer) null);
    if (r < 0) {
      avformat_close_input(avfmtCtx);
      avfmtCtx.close();
      throw new IOException("avformat_find_stream_info error: " + r);
    }

    PointerPointer decoderRet = new PointerPointer<>(1);
    int videoStreamNumber = av_find_best_stream(avfmtCtx, AVMEDIA_TYPE_VIDEO, -1, -1, decoderRet, 0);
    if (videoStreamNumber < 0)
      throw new IOException("failed to find video stream");
    if (decoderRet.get(AVCodec.class).id() != AV_CODEC_ID_H264)
      throw new IOException("failed to find h264 stream");
    decoderRet.deallocate();

    videoStream = avfmtCtx.streams(videoStreamNumber);
  }

  private void initDecoder() {
    codec = avcodec_find_decoder(AV_CODEC_ID_H264);
    codecContext = avcodec_alloc_context3(codec);
    if ((codec.capabilities() & avcodec.AV_CODEC_CAP_TRUNCATED) != 0)
      // note: set the flag constant, not the capability constant
      codecContext.flags(codecContext.flags() | avcodec.AV_CODEC_FLAG_TRUNCATED);
    avcodec_parameters_to_context(codecContext, videoStream.codecpar());
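    // The stream's codec parameters (extradata, dimensions, pixel format)
    // must be copied into the context before avcodec_open2 is called below;
    // otherwise the decoder is opened without the stream's configuration.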
    if (avcodec_open2(codecContext, codec, (PointerPointer) null) < 0)
      throw new RuntimeException("Error: could not open codec.\n");
  }

  private void initYuv420Frame() {
    yuv420Frame = av_frame_alloc();
    if (yuv420Frame == null)
      throw new RuntimeException("Could not allocate video frame\n");
  }

  private void initRgbFrame() {
    rgbFrame = av_frame_alloc();
    rgbFrame.format(AV_PIX_FMT_BGR24);
    rgbFrame.width(codecContext.width());
    rgbFrame.height(codecContext.height());
    int ret = av_image_alloc(rgbFrame.data(), rgbFrame.linesize(),
      rgbFrame.width(), rgbFrame.height(), rgbFrame.format(), 1);
    if (ret < 0)
      throw new RuntimeException("could not allocate buffer!");

    // BGR24 matches TYPE_3BYTE_BGR, so the frame bytes can be copied into the image unchanged
    img = new BufferedImage(rgbFrame.width(), rgbFrame.height(), BufferedImage.TYPE_3BYTE_BGR);
  }

  private void getSwsContext() {
    sws_ctx = swscale.sws_getContext(
      codecContext.width(), codecContext.height(), codecContext.pix_fmt(),
      rgbFrame.width(), rgbFrame.height(), rgbFrame.format(),
      0, null, null, (DoublePointer) null);
  }

  // avpacket == null flushes the decoder (drains delayed frames)
  private void processAVPacket(AVPacket avpacket) throws IOException {
    int ret = avcodec.avcodec_send_packet(codecContext, avpacket);
    if (ret < 0) fail("Error sending a packet for decoding");
    receiveFrames();
  }

  // TODO: do in multiple step()s
  private void receiveFrames() throws IOException {
    while (!stopped() && receiveFrame()) {}
  }

  bool receiveFrame() throws IOException {
    int ret = avcodec.avcodec_receive_frame(codecContext, yuv420Frame);
    if (ret == avutil.AVERROR_EAGAIN() || ret == org.bytedeco.javacpp.avutil.AVERROR_EOF())
      false; // decoder needs more input or is fully drained
    else if (ret < 0)
      fail("error during video decoding");

    swscale.sws_scale(sws_ctx, yuv420Frame.data(), yuv420Frame.linesize(),
      0, yuv420Frame.height(), rgbFrame.data(), rgbFrame.linesize());
    rgbFrame.best_effort_timestamp(yuv420Frame.best_effort_timestamp());
    processFrame(rgbFrame);
    true;
  }

  private void processFrame(AVFrame rgbFrame) {
    DataBufferByte buffer = cast img.getRaster().getDataBuffer();
    rgbFrame.data(0).get(buffer.getData());
    // convert the frame's timestamp from the stream's time base to milliseconds
    long ptsMillis = av_rescale_q(rgbFrame.best_effort_timestamp(), videoStream.time_base(), tb1000);
    position = Duration.of(ptsMillis, ChronoUnit.MILLIS);
    ++nFrames;
    haveImage(img, position);
  }

  close {
    stopped(true);
    // if a decode thread is running, it does the clean-up when it exits
    if (decodeThread == null) _actualClose();
  }

  void _actualClose {
    if (avpacket != null) av_packet_unref(avpacket);
    if (codecContext != null) {
      avcodec.avcodec_close(codecContext);
      avcodec.avcodec_free_context(codecContext);
    }
    swscale.sws_freeContext(sws_ctx);
    if (rgbFrame != null) av_frame_free(rgbFrame);
    if (yuv420Frame != null) av_frame_free(yuv420Frame);
    if (avfmtCtx != null) {
      avformat.avformat_close_input(avfmtCtx);
      avformat.avformat_free_context(avfmtCtx);
    }
  }

  void stop {
    stopped(true);
    interruptThread(decodeThread);
  }

  void startDecodeThread {
    if (decodeThread != null) ret;
    decodeThread = startThread("Decode video", r {
      stepAll(ImageStreamFromVideoFile.this);
      if (stopped()) _actualClose();
    });
  }
} // end of ImageStreamFromVideoFile
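
// Usage sketch (not part of the original file; assumes the standard JavaX
// event translation, which turns "event haveImage" into an onHaveImage(...)
// registration method, and a hypothetical input file name):
/*
  ImageStreamFromVideoFile stream = new ImageStreamFromVideoFile();
  stream.onHaveImage((image, position) ->
    print(position + ": " + image.getWidth() + "x" + image.getHeight()));
  stream.open(new File("/path/to/video.mp4"));
  stream.startDecodeThread(); // decodes in the background, cleans up when done
*/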