Uses 34444K of libraries. Click here for Pure Java version (7812L/45K).
1 | // built on example from https://github.com/vzhn/ffmpeg-java-samples/blob/master/src/main/java/DemuxAndDecodeH264.java |
2 | |
3 | !include once #1035133 // Video Libs [Include] |
4 | |
5 | import java.time.Duration; |
6 | import java.time.temporal.ChronoUnit; |
7 | |
8 | import org.bytedeco.javacpp.*; |
9 | import static org.bytedeco.javacpp.avcodec.*; |
10 | import static org.bytedeco.javacpp.avformat.*; |
11 | import static org.bytedeco.javacpp.avutil.*; |
12 | import org.bytedeco.javacpp.presets.avutil; |
13 | |
sclass ImageStreamFromVideoFile is Steppable, AutoCloseable {
  // image: video frame as an image. This image is a reused buffer, i.e.
  // don't hold on to it in the listener after the callback returns.
  // d: position in video (best-effort timestamp of the frame)
  event haveImage(BufferedImage image, Duration d);

  // set when you are no longer interested in more frames
  settable bool stopped;

  /** container (e.g. Matroska) format context */
  private AVFormatContext avfmtCtx;

  /** video stream information */
  private AVStream videoStream;

  /** demuxed packet */
  private AVPacket avpacket;

  /** H264 decoder */
  private AVCodec codec;

  /** H264 decoder context */
  private AVCodecContext codecContext;

  /** yuv420 frame (raw decoder output) */
  private AVFrame yuv420Frame;

  /** RGB frame (after sws color conversion) */
  private AVFrame rgbFrame;

  /** Java-side RGB image; its backing byte buffer is filled from rgbFrame */
  private BufferedImage img;

  /** yuv420 to rgb converter */
  private swscale.SwsContext sws_ctx;

  /** number of frames decoded so far */
  settable long nFrames;

  // where we are in the video (derived from the last frame's timestamp)
  gettable Duration position;

  /* time base of 1/1000 s - used to rescale stream timestamps to milliseconds */
  private AVRational tb1000;

  Thread decodeThread;

  {
    tb1000 = new AVRational();
    tb1000.num(1);
    tb1000.den(1000);
  }

  /** Open a video file and set up demuxer, decoder, frames and converter.
      May be called only once per instance.
      @throws IOException if the file can't be opened or has no H264 video stream */
  void open(File videoFile) throws IOException {
    if (avfmtCtx != null) fail("Already opened a file");
    av_log_set_level(AV_LOG_VERBOSE);

    openInput(videoFile);
    findVideoStream();
    initDecoder();
    initRgbFrame();   // needs codecContext dimensions, set by initDecoder()
    initYuv420Frame();
    getSwsContext();

    avpacket = new avcodec.AVPacket();
  }

  int stage; // 0 = demuxing packets, 1 = flushing delayed frames, 2 = done

  /** One unit of work: demux+decode one packet (stage 0), or flush the
      decoder (stage 1). Returns true while there is more work to do. */
  public bool step() ctex {
    if (stopped()) false;
    if (stage == 0) {
      if ((av_read_frame(avfmtCtx, avpacket)) >= 0) {
        if (avpacket.stream_index() == videoStream.index())
          processAVPacket(avpacket);
        av_packet_unref(avpacket);
      } else
        stage = 1; // end of file - switch to flushing
      true;
    }
    if (stage == 1) {
      // process delayed frames (drain the decoder with a null packet)
      processAVPacket(null);
      stage = 2;
      false;
    }
    false;
  }

  private AVFormatContext openInput(File videoFile) throws IOException {
    avfmtCtx = new AVFormatContext(null);
    BytePointer filePointer = new BytePointer(videoFile.getPath());
    int r = avformat.avformat_open_input(avfmtCtx, filePointer, null, null);
    filePointer.deallocate();
    if (r < 0) {
      avfmtCtx.close();
      throw new IOException("avformat_open_input error: " + r);
    }
    return avfmtCtx;
  }

  private void findVideoStream() throws IOException {
    int r = avformat_find_stream_info(avfmtCtx, (PointerPointer) null);
    if (r < 0) {
      avformat_close_input(avfmtCtx);
      avfmtCtx.close();
      throw new IOException("error: " + r);
    }

    PointerPointer<AVCodec> decoderRet = new PointerPointer<>(1);
    int videoStreamNumber = av_find_best_stream(avfmtCtx, AVMEDIA_TYPE_VIDEO, -1, -1, decoderRet, 0);
    if (videoStreamNumber < 0) {
      throw new IOException("failed to find video stream");
    }

    if (decoderRet.get(AVCodec.class).id() != AV_CODEC_ID_H264) {
      throw new IOException("failed to find h264 stream");
    }
    decoderRet.deallocate();
    videoStream = avfmtCtx.streams(videoStreamNumber);
  }

  private void initDecoder() {
    codec = avcodec_find_decoder(AV_CODEC_ID_H264);
    codecContext = avcodec_alloc_context3(codec);
    if ((codec.capabilities() & avcodec.AV_CODEC_CAP_TRUNCATED) != 0) {
      // Fixed: the flags field takes AV_CODEC_FLAG_TRUNCATED; the original
      // OR'ed in the capability constant AV_CODEC_CAP_TRUNCATED, which has a
      // different numeric value and silently set the wrong flag bit.
      codecContext.flags(codecContext.flags() | avcodec.AV_CODEC_FLAG_TRUNCATED);
    }
    avcodec_parameters_to_context(codecContext, videoStream.codecpar());
    if (avcodec_open2(codecContext, codec, (PointerPointer) null) < 0) {
      throw new RuntimeException("Error: could not open codec.\n");
    }
  }

  private void initYuv420Frame() {
    yuv420Frame = av_frame_alloc();
    if (yuv420Frame == null) {
      throw new RuntimeException("Could not allocate video frame\n");
    }
  }

  private void initRgbFrame() {
    rgbFrame = av_frame_alloc();
    rgbFrame.format(AV_PIX_FMT_BGR24); // matches BufferedImage.TYPE_3BYTE_BGR below
    rgbFrame.width(codecContext.width());
    rgbFrame.height(codecContext.height());
    int ret = av_image_alloc(rgbFrame.data(),
        rgbFrame.linesize(),
        rgbFrame.width(),
        rgbFrame.height(),
        rgbFrame.format(),
        1);
    if (ret < 0) {
      throw new RuntimeException("could not allocate buffer!");
    }
    img = new BufferedImage(rgbFrame.width(), rgbFrame.height(), BufferedImage.TYPE_3BYTE_BGR);
  }

  private void getSwsContext() {
    sws_ctx = swscale.sws_getContext(
        codecContext.width(), codecContext.height(), codecContext.pix_fmt(),
        rgbFrame.width(), rgbFrame.height(), rgbFrame.format(),
        0, null, null, (DoublePointer) null);
  }

  // avpacket == null drains the decoder's delayed frames (FFmpeg flush semantics)
  private void processAVPacket(AVPacket avpacket) throws IOException {
    int ret = avcodec.avcodec_send_packet(codecContext, avpacket);
    if (ret < 0)
      fail("Error sending a packet for decoding");
    receiveFrames();
  }

  // TODO: do in multiple step()s
  private void receiveFrames() throws IOException {
    while (!stopped() && receiveFrame()) {}
  }

  /** Pull one decoded frame from the codec, convert it to RGB and publish it.
      Returns false when the decoder has no frame available right now. */
  bool receiveFrame() throws IOException {
    int ret = avcodec.avcodec_receive_frame(codecContext, yuv420Frame);
    if (ret == avutil.AVERROR_EAGAIN()
        || ret == org.bytedeco.javacpp.avutil.AVERROR_EOF())
      false;
    else if (ret < 0)
      fail("error during video decoding");

    swscale.sws_scale(sws_ctx, yuv420Frame.data(), yuv420Frame.linesize(), 0,
        yuv420Frame.height(), rgbFrame.data(), rgbFrame.linesize());

    rgbFrame.best_effort_timestamp(yuv420Frame.best_effort_timestamp());
    processFrame(rgbFrame);
    true;
  }

  private void processFrame(AVFrame rgbFrame) {
    // copy native pixels straight into the BufferedImage's backing array
    DataBufferByte buffer = cast img.getRaster().getDataBuffer();
    rgbFrame.data(0).get(buffer.getData());

    long ptsMillis = av_rescale_q(rgbFrame.best_effort_timestamp(), videoStream.time_base(), tb1000);
    position = Duration.of(ptsMillis, ChronoUnit.MILLIS);

    ++nFrames;
    haveImage(img, position);
  }

  close {
    stopped(true);
    // if a decode thread is running, it notices stopped() and frees the
    // native resources itself; otherwise we free them here
    if (decodeThread == null)
      _actualClose();
  }

  /** Free all native resources. Idempotent - freed references are nulled
      so a second call is a no-op (the original could double-free). */
  void _actualClose {
    if (avpacket != null) {
      av_packet_unref(avpacket);
      avpacket = null;
    }
    if (codecContext != null) {
      avcodec.avcodec_close(codecContext);
      avcodec.avcodec_free_context(codecContext);
      codecContext = null;
    }
    if (sws_ctx != null) {
      swscale.sws_freeContext(sws_ctx);
      sws_ctx = null;
    }
    if (rgbFrame != null) {
      av_frame_free(rgbFrame);
      rgbFrame = null;
    }
    if (yuv420Frame != null) {
      av_frame_free(yuv420Frame);
      yuv420Frame = null;
    }
    if (avfmtCtx != null) {
      avformat.avformat_close_input(avfmtCtx);
      avformat.avformat_free_context(avfmtCtx);
      avfmtCtx = null;
    }
  }

  void stop {
    stopped(true);
    interruptThread(decodeThread);
  }

  void startDecodeThread {
    if (decodeThread != null) ret;
    decodeThread = startThread("Decode video", r {
      stepAll(ImageStreamFromVideoFile.this);
      // Fixed leak: always free native resources when the thread finishes.
      // The original only closed when stopped() was set, so a video that
      // simply ran to its natural end leaked all FFmpeg contexts (and a
      // later close() skipped cleanup because decodeThread != null).
      // _actualClose() is idempotent, so overlapping paths are safe.
      _actualClose();
    });
  }
} // end of ImageStreamFromVideoFile
Began life as a copy of #1033824
download show line numbers debug dex old transpilations
Travelled to 3 computer(s): bhatertpkbcr, ekrmjmnbrukm, mqqgnosmbjvj
No comments. add comment
Snippet ID: | #1034381 |
Snippet name: | ImageStreamFromVideoFile [WORKS on Linux and Windows] |
Eternal ID of this version: | #1034381/27 |
Text MD5: | c8071c224dc64925f70ce1b2a0022d57 |
Transpilation MD5: | d2f856a26b4c5680a743b9ab15a8908b |
Author: | stefan |
Category: | javax / video |
Type: | JavaX fragment (include) |
Public (visible to everyone): | Yes |
Archived (hidden from active list): | No |
Created/modified: | 2022-03-30 21:49:31 |
Source code size: | 7691 bytes / 252 lines |
Pitched / IR pitched: | No / No |
Views / Downloads: | 223 / 407 |
Version history: | 26 change(s) |
Referenced in: | [show references] |