/*
* Copyright 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.media.cts;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.os.Environment;
import android.test.AndroidTestCase;
import android.util.Log;
import android.view.Surface;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
//20131106: removed unnecessary glFinish(), removed hard-coded "/sdcard"
//20131205: added alpha to EGLConfig
//20131210: demonstrate un-bind and re-bind of texture, for apps with shared EGL contexts
//20140123: correct error checks on glGet*Location() and program creation (they don't set error)
/**
* Record video from the camera preview and encode it as an MP4 file. Demonstrates the use
* of MediaMuxer and MediaCodec with Camera input. Does not record audio.
* <p>
* Generally speaking, it's better to use MediaRecorder for this sort of thing. This example
* demonstrates one possible advantage: editing of video as it's being encoded. A GLES 2.0
* fragment shader is used to perform a silly color tweak every 15 frames.
* <p>
* This uses various features first available in Android "Jellybean" 4.3 (API 18). There is
* no equivalent functionality in previous releases. (You can send the Camera preview to a
* byte buffer with a fully-specified format, but MediaCodec encoders want different input
* formats on different devices, and this use case wasn't well exercised in CTS pre-4.3.)
* <p>
* The output file will be something like "/sdcard/test.640x480.mp4".
* <p>
* (This was derived from bits and pieces of CTS tests, and is packaged as such, but is not
* currently part of CTS.)
*/
public class CameraToMpegTest extends AndroidTestCase {
private static final String TAG = "CameraToMpegTest";
private static final boolean VERBOSE = false; // lots of logging
// where to put the output file (note: /sdcard requires WRITE_EXTERNAL_STORAGE permission)
private static final File OUTPUT_DIR = Environment.getExternalStorageDirectory();
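// A host app also needs camera access; a minimal manifest sketch (illustrative, not part
// of the original test) would declare something like:
//   <uses-permission android:name="android.permission.CAMERA" />
//   <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />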
// parameters for the encoder
private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
private static final int FRAME_RATE = 30; // 30fps
private static final int IFRAME_INTERVAL = 5; // 5 seconds between I-frames
private static final long DURATION_SEC = 8; // 8 seconds of video
// Fragment shader that swaps color channels around.
private static final String SWAPPED_FRAGMENT_SHADER =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" +
"varying vec2 vTextureCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"void main() {\n" +
" gl_FragColor = texture2D(sTexture, vTextureCoord).gbra;\n" +
"}\n";
// encoder / muxer state
private MediaCodec mEncoder;
private CodecInputSurface mInputSurface;
private MediaMuxer mMuxer;
private int mTrackIndex;
private boolean mMuxerStarted;
// camera state
private Camera mCamera;
private SurfaceTextureManager mStManager;
// allocate one of these up front so we don't need to do it every time
private MediaCodec.BufferInfo mBufferInfo;
/** test entry point */
public void testEncodeCameraToMp4() throws Throwable {
CameraToMpegWrapper.runTest(this);
}
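// As an AndroidTestCase this is normally launched through an instrumentation runner rather
// than called directly. The exact command depends on how the test APK is packaged, but it
// would look roughly like (illustrative only):
//   adb shell am instrument -w -e class android.media.cts.CameraToMpegTest \
//       <your.test.package>/android.test.InstrumentationTestRunner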
/**
* Wraps encodeCameraToMpeg(). This is necessary because SurfaceTexture will try to use
* the looper in the current thread if one exists, and the CTS tests create one on the
* test thread.
*
* The wrapper propagates exceptions thrown by the worker thread back to the caller.
*/
private static class CameraToMpegWrapper implements Runnable {
private Throwable mThrowable;
private CameraToMpegTest mTest;
private CameraToMpegWrapper(CameraToMpegTest test) {
mTest = test;
}
@Override
public void run() {
try {
mTest.encodeCameraToMpeg();
} catch (Throwable th) {
mThrowable = th;
}
}
/** Entry point. */
public static void runTest(CameraToMpegTest obj) throws Throwable {
CameraToMpegWrapper wrapper = new CameraToMpegWrapper(obj);
Thread th = new Thread(wrapper, "codec test");
th.start();
th.join();
if (wrapper.mThrowable != null) {
throw wrapper.mThrowable;
}
}
}
/**
* Tests encoding of AVC video from Camera input. The output is saved as an MP4 file.
*/
private void encodeCameraToMpeg() {
// arbitrary but popular values
int encWidth = 640;
int encHeight = 480;
int encBitRate = 6000000; // bits per second (6 Mbps)
Log.d(TAG, MIME_TYPE + " output " + encWidth + "x" + encHeight + " @" + encBitRate);
try {
prepareCamera(encWidth, encHeight);
prepareEncoder(encWidth, encHeight, encBitRate);
mInputSurface.makeCurrent();
prepareSurfaceTexture();
mCamera.startPreview();
long startWhen = System.nanoTime();
long desiredEnd = startWhen + DURATION_SEC * 1000000000L;
SurfaceTexture st = mStManager.getSurfaceTexture();
int frameCount = 0;
while (System.nanoTime() < desiredEnd) {
// Feed any pending encoder output into the muxer.
drainEncoder(false);
// Switch up the colors every 15 frames. Besides demonstrating the use of
// fragment shaders for video editing, this provides a visual indication of
// the frame rate: if the camera is capturing at 15fps, the colors will change
// once per second.
if ((frameCount % 15) == 0) {
String fragmentShader = null;
if ((frameCount & 0x01) != 0) {
fragmentShader = SWAPPED_FRAGMENT_SHADER;
}
mStManager.changeFragmentShader(fragmentShader);
}
frameCount++;
// Acquire a new frame of input, and render it to the Surface. If we had a
// GLSurfaceView we could switch EGL contexts and call drawImage() a second
// time to render it on screen. The texture can be shared between contexts by
// passing the GLSurfaceView's EGLContext as eglCreateContext()'s share_context
// argument.
mStManager.awaitNewImage();
mStManager.drawImage();
// Set the presentation time stamp from the SurfaceTexture's time stamp. This
// will be used by MediaMuxer to set the PTS in the video.
if (VERBOSE) {
Log.d(TAG, "present: " +
((st.getTimestamp() - startWhen) / 1000000.0) + "ms");
}
mInputSurface.setPresentationTime(st.getTimestamp());
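// (The value passed here is in nanoseconds; the encoder reports it back to us via
// BufferInfo.presentationTimeUs, in microseconds, which is what MediaMuxer consumes.)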
// Submit it to the encoder. The eglSwapBuffers call will block if the input
// is full, which would be bad if it stayed full until we dequeued an output
// buffer (which we can't do, since we're stuck here). So long as we fully drain
// the encoder before supplying additional input, the system guarantees that we
// can supply another frame without blocking.
if (VERBOSE) Log.d(TAG, "sending frame to encoder");
mInputSurface.swapBuffers();
}
// send end-of-stream to encoder, and drain remaining output
drainEncoder(true);
} finally {
// release everything we grabbed
releaseCamera();
releaseEncoder();
releaseSurfaceTexture();
}
}
/**
* Configures Camera for video capture. Sets mCamera.
* <p>
* Opens a Camera and sets parameters. Does not start preview.
*/
private void prepareCamera(int encWidth, int encHeight) {
if (mCamera != null) {
throw new RuntimeException("camera already initialized");
}
Camera.CameraInfo info = new Camera.CameraInfo();
// Try to find a front-facing camera (e.g. for videoconferencing).
int numCameras = Camera.getNumberOfCameras();
for (int i = 0; i < numCameras; i++) {
Camera.getCameraInfo(i, info);
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
mCamera = Camera.open(i);
break;
}
}
if (mCamera == null) {
Log.d(TAG, "No front-facing camera found; opening default");
mCamera = Camera.open(); // opens first back-facing camera
}
if (mCamera == null) {
throw new RuntimeException("Unable to open camera");
}
Camera.Parameters parms = mCamera.getParameters();
choosePreviewSize(parms, encWidth, encHeight);
// leave the frame rate set to default
mCamera.setParameters(parms);
Camera.Size size = parms.getPreviewSize();
Log.d(TAG, "Camera preview size is " + size.width + "x" + size.height);
}
/**
* Attempts to find a preview size that matches the provided width and height (which
* specify the dimensions of the encoded video). If it fails to find a match it just
* uses the default preview size.
* <p>
* TODO: should do a best-fit match.
*/
private static void choosePreviewSize(Camera.Parameters parms, int width, int height) {
// We should make sure that the requested MPEG size is less than the preferred
// size, and has the same aspect ratio.
Camera.Size ppsfv = parms.getPreferredPreviewSizeForVideo();
if (VERBOSE && ppsfv != null) {
Log.d(TAG, "Camera preferred preview size for video is " +
ppsfv.width + "x" + ppsfv.height);
}
for (Camera.Size size : parms.getSupportedPreviewSizes()) {
if (size.width == width && size.height == height) {
parms.setPreviewSize(width, height);
return;
}
}
Log.w(TAG, "Unable to set preview size to " + width + "x" + height);
if (ppsfv != null) {
parms.setPreviewSize(ppsfv.width, ppsfv.height);
}
}
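/**
 * Illustrative sketch (not called by this test) of the "best-fit match" mentioned in the
 * TODO above: picks the supported preview size whose pixel area is closest to the requested
 * dimensions. A real implementation might also weigh aspect ratio.
 */
private static void chooseBestFitPreviewSize(Camera.Parameters parms, int width, int height) {
Camera.Size best = null;
int bestDelta = Integer.MAX_VALUE;
for (Camera.Size size : parms.getSupportedPreviewSizes()) {
// Compare total pixel counts; the smallest difference wins.
int delta = Math.abs(size.width * size.height - width * height);
if (delta < bestDelta) {
bestDelta = delta;
best = size;
}
}
if (best != null) {
parms.setPreviewSize(best.width, best.height);
}
}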
/**
* Stops camera preview, and releases the camera to the system.
*/
private void releaseCamera() {
if (VERBOSE) Log.d(TAG, "releasing camera");
if (mCamera != null) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
}
/**
* Configures SurfaceTexture for camera preview. Initializes mStManager, and sets the
* associated SurfaceTexture as the Camera's "preview texture".
* <p>
* Configure the EGL surface that will be used for output before calling here.
*/
private void prepareSurfaceTexture() {
mStManager = new SurfaceTextureManager();
SurfaceTexture st = mStManager.getSurfaceTexture();
try {
mCamera.setPreviewTexture(st);
} catch (IOException ioe) {
throw new RuntimeException("setPreviewTexture failed", ioe);
}
}
/**
* Releases the SurfaceTexture.
*/
private void releaseSurfaceTexture() {
if (mStManager != null) {
mStManager.release();
mStManager = null;
}
}
/**
* Configures encoder and muxer state, and prepares the input Surface. Initializes
* mEncoder, mMuxer, mInputSurface, mBufferInfo, mTrackIndex, and mMuxerStarted.
*/
private void prepareEncoder(int width, int height, int bitRate) {
mBufferInfo = new MediaCodec.BufferInfo();
MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
if (VERBOSE) Log.d(TAG, "format: " + format);
// Create a MediaCodec encoder, and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
//
// If you want to have two EGL contexts -- one for display, one for recording --
// you will likely want to defer instantiation of CodecInputSurface until after the
// "display" EGL context is created, then modify the eglCreateContext call to
// take eglGetCurrentContext() as the share_context argument.
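// Note: from API 21 onward, createEncoderByType() declares a checked IOException, so
// code built against newer SDKs may need a try/catch (or a throws clause) here.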
mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mInputSurface = new CodecInputSurface(mEncoder.createInputSurface());
mEncoder.start();
// Output filename. Ideally this would use Context.getFilesDir() rather than a
// hard-coded output directory.
String outputPath = new File(OUTPUT_DIR,
"test." + width + "x" + height + ".mp4").toString();
Log.i(TAG, "Output file is " + outputPath);
// Create a MediaMuxer. We can't add the video track and start() the muxer here,
// because our MediaFormat doesn't have the Magic Goodies. These can only be
// obtained from the encoder after it has started processing data.
//
// We're not actually interested in multiplexing audio. We just want to convert
// the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
try {
mMuxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
} catch (IOException ioe) {
throw new RuntimeException("MediaMuxer creation failed", ioe);
}
mTrackIndex = -1;
mMuxerStarted = false;
}
/**
* Releases encoder resources.
*/
private void releaseEncoder() {
if (VERBOSE) Log.d(TAG, "releasing encoder objects");
if (mEncoder != null) {
mEncoder.stop();
mEncoder.release();
mEncoder = null;
}
if (mInputSurface != null) {
mInputSurface.release();
mInputSurface = null;
}
if (mMuxer != null) {
mMuxer.stop();
mMuxer.release();
mMuxer = null;
}
}
/**
* Extracts all pending data from the encoder and forwards it to the muxer.
* <p>
* If endOfStream is not set, this returns when there is no more data to drain. If it
* is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
* Calling this with endOfStream set should be done once, right before stopping the muxer.
* <p>
* We're just using the muxer to get a .mp4 file (instead of a raw H.264 stream). We're
* not recording audio.
*/
private void drainEncoder(boolean endOfStream) {
final int TIMEOUT_USEC = 10000;
if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");
if (endOfStream) {
if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
mEncoder.signalEndOfInputStream();
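// (signalEndOfInputStream() is the EOS mechanism for Surface input; with ByteBuffer
// input you would instead queue a buffer flagged BUFFER_FLAG_END_OF_STREAM.)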
}
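// getOutputBuffers() and INFO_OUTPUT_BUFFERS_CHANGED are the API 18-era pattern this
// example was written against; API 21+ deprecates them in favor of getOutputBuffer(int).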
ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
while (true) {
int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (!endOfStream) {
break; // out of while
} else {
if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = mEncoder.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// should happen before receiving buffers, and should only happen once
if (mMuxerStarted) {
throw new RuntimeException("format changed twice");
}
MediaFormat newFormat = mEncoder.getOutputFormat();
Log.d(TAG, "encoder output format changed: " + newFormat);
// now that we have the Magic Goodies, start the muxer
mTrackIndex = mMuxer.addTrack(newFormat);
mMuxer.start();
mMuxerStarted = true;
} else if (encoderStatus < 0) {
Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
encoderStatus);
// let's ignore it
} else {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
" was null");
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
mBufferInfo.size = 0;
}
if (mBufferInfo.size != 0) {
if (!mMuxerStarted) {
throw new RuntimeException("muxer hasn't started");
}
// adjust the ByteBuffer values to match BufferInfo (not needed?)
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
if (VERBOSE) Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer");
}
mEncoder.releaseOutputBuffer(encoderStatus, false);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (!endOfStream) {
Log.w(TAG, "reached end of stream unexpectedly");
} else {
if (VERBOSE) Log.d(TAG, "end of stream reached");
}
break; // out of while
}
}
}
}
/**
* Holds state associated with a Surface used for MediaCodec encoder input.
* <p>
* The constructor takes a Surface obtained from MediaCodec.createInputSurface(), and uses
* that to create an EGL window surface. Calls to eglSwapBuffers() cause a frame of data to
* be sent to the video encoder.
* <p>
* This object owns the Surface -- releasing this will release the Surface too.
*/
private static class CodecInputSurface {
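// 0x3142 is EGL_RECORDABLE_ANDROID from the EGL_ANDROID_recordable extension; EGL14
// doesn't expose the constant, so it is defined locally here.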
private static final int EGL_RECORDABLE_ANDROID = 0x3142;
private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
private Surface mSurface;
/**
* Creates a CodecInputSurface from a Surface.
*/
public CodecInputSurface(Surface surface) {
if (surface == null) {
throw new NullPointerException();
}
mSurface = surface;
eglSetup();
}
/**
* Prepares EGL. We want a GLES 2.0 context and a surface that supports recording.
*/
private void eglSetup() {
mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException("unable to get EGL14 display");
}
int[] version = new int[2];
if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
throw new RuntimeException("unable to initialize EGL14");
}
// Configure EGL for recording and OpenGL ES 2.0.
int[] attribList = {
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_ALPHA_SIZE, 8,
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL_RECORDABLE_ANDROID, 1,
EGL14.EGL_NONE
};
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
numConfigs, 0);
checkEglError("eglCreateContext RGB888+recordable ES2");
// Configure context for OpenGL ES 2.0.
int[] attrib_list = {
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
};
mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
attrib_list, 0);
checkEglError("eglCreateContext");
// Create a window surface, and attach it to the Surface we received.
int[] surfaceAttribs = {
EGL14.EGL_NONE
};
mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface,
surfaceAttribs, 0);
checkEglError("eglCreateWindowSurface");
}
/**
* Discards all resources held by this class, notably the EGL context. Also releases the
* Surface that was passed to our constructor.
*/
public void release() {
if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
EGL14.EGL_NO_CONTEXT);
EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
EGL14.eglReleaseThread();
EGL14.eglTerminate(mEGLDisplay);
}
mSurface.release();
mEGLDisplay = EGL14.EGL_NO_DISPLAY;
mEGLContext = EGL14.EGL_NO_CONTEXT;
mEGLSurface = EGL14.EGL_NO_SURFACE;
mSurface = null;
}
/**
* Makes our EGL context and surface current.
*/
public void makeCurrent() {
EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext);
checkEglError("eglMakeCurrent");
}
/**
* Calls eglSwapBuffers. Use this to "publish" the current frame.
*/
public boolean swapBuffers() {
boolean result = EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
checkEglError("eglSwapBuffers");
return result;
}
/**
* Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
*/
public void setPresentationTime(long nsecs) {
EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs);
checkEglError("eglPresentationTimeANDROID");
}
/**
* Checks for EGL errors. Throws an exception if one is found.
*/
private void checkEglError(String msg) {
int error;
if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
}
}
}
/**
* Manages a SurfaceTexture. Creates SurfaceTexture and TextureRender objects, and provides
* functions that wait for frames and render them to the current EGL surface.
* <p>
* The SurfaceTexture can be passed to Camera.setPreviewTexture() to receive camera output.
*/
private static class SurfaceTextureManager
implements SurfaceTexture.OnFrameAvailableListener {
private SurfaceTexture mSurfaceTexture;
private CameraToMpegTest.STextureRender mTextureRender;
private Object mFrameSyncObject = new Object(); // guards mFrameAvailable
private boolean mFrameAvailable;
/**
* Creates instances of TextureRender and SurfaceTexture.
*/
public SurfaceTextureManager() {
mTextureRender = new CameraToMpegTest.STextureRender();
mTextureRender.surfaceCreated();
if (VERBOSE) Log.d(TAG, "textureID=" + mTextureRender.getTextureId());
mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
// This doesn't work if this object is created on the thread that CTS started for
// these test cases.
//
// The CTS-created thread has a Looper, and the SurfaceTexture constructor will
// create a Handler that uses it. The "frame available" message is delivered
// there, but since we're not a Looper-based thread we'll never see it. For
// this to do anything useful, SurfaceTextureManager must be created on a thread without
// a Looper, so that SurfaceTexture uses the main application Looper instead.
//
// Java language note: passing "this" out of a constructor is generally unwise,
// but we should be able to get away with it here.
mSurfaceTexture.setOnFrameAvailableListener(this);
}
public void release() {
// this causes a bunch of warnings that appear harmless but might confuse someone:
// W BufferQueue: [unnamed-3997-2] cancelBuffer: BufferQueue has been abandoned!
//mSurfaceTexture.release();
mTextureRender = null;
mSurfaceTexture = null;
}
/**
* Returns the SurfaceTexture.
*/
public SurfaceTexture getSurfaceTexture() {
return mSurfaceTexture;
}
/**
* Replaces the fragment shader.
*/
public void changeFragmentShader(String fragmentShader) {
mTextureRender.changeFragmentShader(fragmentShader);
}
/**
* Latches the next buffer into the texture. Must be called from the thread that created
* the SurfaceTextureManager object.
*/
public void awaitNewImage() {
final int TIMEOUT_MS = 2500;
synchronized (mFrameSyncObject) {
while (!mFrameAvailable) {
try {
// Wait for onFrameAvailable() to signal us. Use a timeout to avoid
// stalling the test if it doesn't arrive.
mFrameSyncObject.wait(TIMEOUT_MS);
if (!mFrameAvailable) {
// TODO: if "spurious wakeup", continue while loop
throw new RuntimeException("Camera frame wait timed out");
}
} catch (InterruptedException ie) {
// shouldn't happen
throw new RuntimeException(ie);
}
}
mFrameAvailable = false;
}
// Latch the data.
mTextureRender.checkGlError("before updateTexImage");
mSurfaceTexture.updateTexImage();
}
/**
* Draws the data from SurfaceTexture onto the current EGL surface.
*/
public void drawImage() {
mTextureRender.drawFrame(mSurfaceTexture);
}
@Override
public void onFrameAvailable(SurfaceTexture st) {
if (VERBOSE) Log.d(TAG, "new frame available");
synchronized (mFrameSyncObject) {
if (mFrameAvailable) {
throw new RuntimeException("mFrameAvailable already set, frame could be dropped");
}
mFrameAvailable = true;
mFrameSyncObject.notifyAll();
}
}
}
/**
* Code for rendering a texture onto a surface using OpenGL ES 2.0.
*/
private static class STextureRender {
private static final int FLOAT_SIZE_BYTES = 4;
private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
private final float[] mTriangleVerticesData = {
// X, Y, Z, U, V
-1.0f, -1.0f, 0, 0.f, 0.f,
1.0f, -1.0f, 0, 1.f, 0.f,
-1.0f, 1.0f, 0, 0.f, 1.f,
1.0f, 1.0f, 0, 1.f, 1.f,
};
private FloatBuffer mTriangleVertices;
private static final String VERTEX_SHADER =
"uniform mat4 uMVPMatrix;\n" +
"uniform mat4 uSTMatrix;\n" +
"attribute vec4 aPosition;\n" +
"attribute vec4 aTextureCoord;\n" +
"varying vec2 vTextureCoord;\n" +
"void main() {\n" +
" gl_Position = uMVPMatrix * aPosition;\n" +
" vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
"}\n";
private static final String FRAGMENT_SHADER =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" + // highp here doesn't seem to matter
"varying vec2 vTextureCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"void main() {\n" +
" gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
"}\n";
private float[] mMVPMatrix = new float[16];
private float[] mSTMatrix = new float[16];
private int mProgram;
private int mTextureID = -12345;
private int muMVPMatrixHandle;
private int muSTMatrixHandle;
private int maPositionHandle;
private int maTextureHandle;
public STextureRender() {
mTriangleVertices = ByteBuffer.allocateDirect(
mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mTriangleVertices.put(mTriangleVerticesData).position(0);
Matrix.setIdentityM(mSTMatrix, 0);
}
public int getTextureId() {
return mTextureID;
}
public void drawFrame(SurfaceTexture st) {
checkGlError("onDrawFrame start");
st.getTransformMatrix(mSTMatrix);
// (optional) clear to green so we can see if we're failing to set pixels
GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glUseProgram(mProgram);
checkGlError("glUseProgram");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer maPosition");
GLES20.glEnableVertexAttribArray(maPositionHandle);
checkGlError("glEnableVertexAttribArray maPositionHandle");
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer maTextureHandle");
GLES20.glEnableVertexAttribArray(maTextureHandle);
checkGlError("glEnableVertexAttribArray maTextureHandle");
Matrix.setIdentityM(mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
checkGlError("glDrawArrays");
// IMPORTANT: on some devices, if you are sharing the external texture between two
// contexts, one context may not see updates to the texture unless you un-bind and
// re-bind it. If you're not using shared EGL contexts, you don't need to bind
// texture 0 here.
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
}
/**
* Initializes GL state. Call this after the EGL surface has been created and made current.
*/
public void surfaceCreated() {
mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
if (mProgram == 0) {
throw new RuntimeException("failed creating program");
}
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
checkLocation(maPositionHandle, "aPosition");
maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
checkLocation(maTextureHandle, "aTextureCoord");
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
checkLocation(muMVPMatrixHandle, "uMVPMatrix");
muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
checkLocation(muSTMatrixHandle, "uSTMatrix");
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
mTextureID = textures[0];
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
checkGlError("glBindTexture mTextureID");
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
GLES20.GL_CLAMP_TO_EDGE);
checkGlError("glTexParameter");
}
/**
* Replaces the fragment shader. Pass in null to reset to default.
*/
public void changeFragmentShader(String fragmentShader) {
if (fragmentShader == null) {
fragmentShader = FRAGMENT_SHADER;
}
GLES20.glDeleteProgram(mProgram);
mProgram = createProgram(VERTEX_SHADER, fragmentShader);
if (mProgram == 0) {
throw new RuntimeException("failed creating program");
}
}
private int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType);
checkGlError("glCreateShader type=" + shaderType);
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
Log.e(TAG, "Could not compile shader " + shaderType + ":");
Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
return shader;
}
private int createProgram(String vertexSource, String fragmentSource) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (pixelShader == 0) {
return 0;
}
int program = GLES20.glCreateProgram();
if (program == 0) {
Log.e(TAG, "Could not create program");
}
GLES20.glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
GLES20.glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Log.e(TAG, "Could not link program: ");
Log.e(TAG, GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
}
return program;
}
public void checkGlError(String op) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, op + ": glError " + error);
throw new RuntimeException(op + ": glError " + error);
}
}
public static void checkLocation(int location, String label) {
if (location < 0) {
throw new RuntimeException("Unable to locate '" + label + "' in program");
}
}
}
}