DecodeEditEncodeTest.java

Source: Internet | Published: (blog metadata) | Editor: programming blog network | Time: 2024/05/19 19:34
  1. /*
  2. * Copyright (C) 2013 The Android Open Source Project
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. package android.media.cts;
  17. import android.media.MediaCodec;
  18. import android.media.MediaCodecInfo;
  19. import android.media.MediaCodecList;
  20. import android.media.MediaFormat;
  21. import android.opengl.GLES20;
  22. import android.test.AndroidTestCase;
  23. import android.util.Log;
  24. import java.io.BufferedOutputStream;
  25. import java.io.File;
  26. import java.io.FileOutputStream;
  27. import java.io.IOException;
  28. import java.nio.ByteBuffer;
  29. import java.util.ArrayList;
  30. import javax.microedition.khronos.opengles.GL10;
  31. /**
  32. * This test has three steps:
  33. * <ol>
  34. * <li>Generate a video test stream.
  35. * <li>Decode the video from the stream, rendering frames into a SurfaceTexture.
  36. * Render the texture onto a Surface that feeds a video encoder, modifying
  37. * the output with a fragment shader.
  38. * <li>Decode the second video and compare it to the expected result.
  39. * </ol><p>
  40. * The second step is a typical scenario for video editing. We could do all this in one
  41. * step, feeding data through multiple stages of MediaCodec, but at some point we're
  42. * no longer exercising the code in the way we expect it to be used (and the code
  43. * gets a bit unwieldy).
  44. */
  45. publicclassDecodeEditEncodeTestextendsAndroidTestCase{
  46. privatestaticfinalString TAG="DecodeEditEncode";
  47. privatestaticfinalboolean WORK_AROUND_BUGS=false;// avoid fatal codec bugs
  48. privatestaticfinalboolean VERBOSE=false;// lots of logging
  49. privatestaticfinalboolean DEBUG_SAVE_FILE=false;// save copy of encoded movie
  50. privatestaticfinalString DEBUG_FILE_NAME_BASE="/sdcard/test.";
  51. // parameters for the encoder
  52. privatestaticfinalString MIME_TYPE="video/avc";// H.264 Advanced Video Coding
  53. privatestaticfinalint FRAME_RATE=15;// 15fps
  54. privatestaticfinalint IFRAME_INTERVAL=10;// 10 seconds between I-frames
  55. // movie length, in frames
  56. privatestaticfinalint NUM_FRAMES=30;// two seconds of video
  57. privatestaticfinalint TEST_R0=0;// dull green background
  58. privatestaticfinalint TEST_G0=136;
  59. privatestaticfinalint TEST_B0=0;
  60. privatestaticfinalint TEST_R1=236;// pink; BT.601 YUV {120,160,200}
  61. privatestaticfinalint TEST_G1=50;
  62. privatestaticfinalint TEST_B1=186;
  63. // Replaces TextureRender.FRAGMENT_SHADER during edit; swaps green and blue channels.
  64. privatestaticfinalString FRAGMENT_SHADER=
  65. "#extension GL_OES_EGL_image_external : require\n"+
  66. "precision mediump float;\n"+
  67. "varying vec2 vTextureCoord;\n"+
  68. "uniform samplerExternalOES sTexture;\n"+
  69. "void main() {\n"+
  70. " gl_FragColor = texture2D(sTexture, vTextureCoord).rbga;\n"+
  71. "}\n";
  72. // size of a frame, in pixels
  73. privateint mWidth=-1;
  74. privateint mHeight=-1;
  75. // bit rate, in bits per second
  76. privateint mBitRate=-1;
  77. // largest color component delta seen (i.e. actual vs. expected)
  78. privateint mLargestColorDelta;
  79. publicvoid testVideoEditQCIF()throwsThrowable{
  80. setParameters(176,144,1000000);
  81. VideoEditWrapper.runTest(this);
  82. }
  83. publicvoid testVideoEditQVGA()throwsThrowable{
  84. setParameters(320,240,2000000);
  85. VideoEditWrapper.runTest(this);
  86. }
  87. publicvoid testVideoEdit720p()throwsThrowable{
  88. setParameters(1280,720,6000000);
  89. VideoEditWrapper.runTest(this);
  90. }
  91. /**
  92. * Wraps testEditVideo, running it in a new thread. Required because of the way
  93. * SurfaceTexture.OnFrameAvailableListener works when the current thread has a Looper
  94. * configured.
  95. */
  96. privatestaticclassVideoEditWrapperimplementsRunnable{
  97. privateThrowable mThrowable;
  98. privateDecodeEditEncodeTest mTest;
  99. privateVideoEditWrapper(DecodeEditEncodeTest test){
  100. mTest= test;
  101. }
  102. @Override
  103. publicvoid run(){
  104. try{
  105. mTest.videoEditTest();
  106. }catch(Throwable th){
  107. mThrowable= th;
  108. }
  109. }
  110. /** Entry point. */
  111. publicstaticvoid runTest(DecodeEditEncodeTest obj)throwsThrowable{
  112. VideoEditWrapper wrapper=newVideoEditWrapper(obj);
  113. Thread th=newThread(wrapper,"codec test");
  114. th.start();
  115. th.join();
  116. if(wrapper.mThrowable!=null){
  117. throw wrapper.mThrowable;
  118. }
  119. }
  120. }
  121. /**
  122. * Sets the desired frame size and bit rate.
  123. */
  124. privatevoid setParameters(int width,int height,int bitRate){
  125. if((width%16)!=0||(height%16)!=0){
  126. Log.w(TAG,"WARNING: width or height not multiple of 16");
  127. }
  128. mWidth= width;
  129. mHeight= height;
  130. mBitRate= bitRate;
  131. }
  132. /**
  133. * Tests editing of a video file with GL.
  134. */
  135. privatevoid videoEditTest(){
  136. VideoChunks sourceChunks=newVideoChunks();
  137. if(!generateVideoFile(sourceChunks)){
  138. // No AVC codec? Fail silently.
  139. return;
  140. }
  141. if(DEBUG_SAVE_FILE){
  142. // Save a copy to a file. We call it ".mp4", but it's actually just an elementary
  143. // stream, so not all video players will know what to do with it.
  144. String dirName= getContext().getFilesDir().getAbsolutePath();
  145. String fileName="vedit1_"+ mWidth+"x"+ mHeight+".mp4";
  146. sourceChunks.saveToFile(newFile(dirName, fileName));
  147. }
  148. VideoChunks destChunks= editVideoFile(sourceChunks);
  149. if(DEBUG_SAVE_FILE){
  150. String dirName= getContext().getFilesDir().getAbsolutePath();
  151. String fileName="vedit2_"+ mWidth+"x"+ mHeight+".mp4";
  152. destChunks.saveToFile(newFile(dirName, fileName));
  153. }
  154. checkVideoFile(destChunks);
  155. }
  156. /**
  157. * Generates a test video file, saving it as VideoChunks. We generate frames with GL to
  158. * avoid having to deal with multiple YUV formats.
  159. *
  160. * @return true on success, false on "soft" failure
  161. */
  162. privateboolean generateVideoFile(VideoChunks output){
  163. if(VERBOSE)Log.d(TAG,"generateVideoFile "+ mWidth+"x"+ mHeight);
  164. MediaCodec encoder=null;
  165. InputSurface inputSurface=null;
  166. try{
  167. MediaCodecInfo codecInfo= selectCodec(MIME_TYPE);
  168. if(codecInfo==null){
  169. // Don't fail CTS if they don't have an AVC codec (not here, anyway).
  170. Log.e(TAG,"Unable to find an appropriate codec for "+ MIME_TYPE);
  171. returnfalse;
  172. }
  173. if(VERBOSE)Log.d(TAG,"found codec: "+ codecInfo.getName());
  174. // We avoid the device-specific limitations on width and height by using values that
  175. // are multiples of 16, which all tested devices seem to be able to handle.
  176. MediaFormat format=MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
  177. // Set some properties. Failing to specify some of these can cause the MediaCodec
  178. // configure() call to throw an unhelpful exception.
  179. format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
  180. MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
  181. format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
  182. format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
  183. format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
  184. if(VERBOSE)Log.d(TAG,"format: "+ format);
  185. output.setMediaFormat(format);
  186. // Create a MediaCodec for the desired codec, then configure it as an encoder with
  187. // our desired properties.
  188. encoder=MediaCodec.createByCodecName(codecInfo.getName());
  189. encoder.configure(format,null,null,MediaCodec.CONFIGURE_FLAG_ENCODE);
  190. inputSurface=newInputSurface(encoder.createInputSurface());
  191. inputSurface.makeCurrent();
  192. encoder.start();
  193. generateVideoData(encoder, inputSurface, output);
  194. }finally{
  195. if(encoder!=null){
  196. if(VERBOSE)Log.d(TAG,"releasing encoder");
  197. encoder.stop();
  198. encoder.release();
  199. if(VERBOSE)Log.d(TAG,"released encoder");
  200. }
  201. if(inputSurface!=null){
  202. inputSurface.release();
  203. }
  204. }
  205. returntrue;
  206. }
  207. /**
  208. * Returns the first codec capable of encoding the specified MIME type, or null if no
  209. * match was found.
  210. */
  211. privatestaticMediaCodecInfo selectCodec(String mimeType){
  212. int numCodecs=MediaCodecList.getCodecCount();
  213. for(int i=0; i< numCodecs; i++){
  214. MediaCodecInfo codecInfo=MediaCodecList.getCodecInfoAt(i);
  215. if(!codecInfo.isEncoder()){
  216. continue;
  217. }
  218. String[] types= codecInfo.getSupportedTypes();
  219. for(int j=0; j< types.length; j++){
  220. if(types[j].equalsIgnoreCase(mimeType)){
  221. return codecInfo;
  222. }
  223. }
  224. }
  225. returnnull;
  226. }
  227. /**
  228. * Generates video frames, feeds them into the encoder, and writes the output to the
  229. * VideoChunks instance.
  230. */
  231. privatevoid generateVideoData(MediaCodec encoder,InputSurface inputSurface,
  232. VideoChunks output){
  233. finalint TIMEOUT_USEC=10000;
  234. ByteBuffer[] encoderOutputBuffers= encoder.getOutputBuffers();
  235. MediaCodec.BufferInfo info =newMediaCodec.BufferInfo();
  236. int generateIndex=0;
  237. int outputCount=0;
  238. // Loop until the output side is done.
  239. boolean inputDone=false;
  240. boolean outputDone=false;
  241. while(!outputDone){
  242. if(VERBOSE)Log.d(TAG,"gen loop");
  243. // If we're not done submitting frames, generate a new one and submit it. The
  244. // eglSwapBuffers call will block if the input is full.
  245. if(!inputDone){
  246. if(generateIndex== NUM_FRAMES){
  247. // Send an empty frame with the end-of-stream flag set.
  248. if(VERBOSE)Log.d(TAG,"signaling input EOS");
  249. if(WORK_AROUND_BUGS){
  250. // Might drop a frame, but at least we won't crash mediaserver.
  251. try{Thread.sleep(500);}catch(InterruptedException ie){}
  252. outputDone=true;
  253. }else{
  254. encoder.signalEndOfInputStream();
  255. }
  256. inputDone=true;
  257. }else{
  258. generateSurfaceFrame(generateIndex);
  259. inputSurface.setPresentationTime(computePresentationTime(generateIndex)*1000);
  260. if(VERBOSE)Log.d(TAG,"inputSurface swapBuffers");
  261. inputSurface.swapBuffers();
  262. }
  263. generateIndex++;
  264. }
  265. // Check for output from the encoder. If there's no output yet, we either need to
  266. // provide more input, or we need to wait for the encoder to work its magic. We
  267. // can't actually tell which is the case, so if we can't get an output buffer right
  268. // away we loop around and see if it wants more input.
  269. //
  270. // If we do find output, drain it all before supplying more input.
  271. while(true){
  272. int encoderStatus= encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
  273. if(encoderStatus==MediaCodec.INFO_TRY_AGAIN_LATER){
  274. // no output available yet
  275. if(VERBOSE)Log.d(TAG,"no output from encoder available");
  276. break;// out of while
  277. }elseif(encoderStatus==MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED){
  278. // not expected for an encoder
  279. encoderOutputBuffers= encoder.getOutputBuffers();
  280. if(VERBOSE)Log.d(TAG,"encoder output buffers changed");
  281. }elseif(encoderStatus==MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
  282. // not expected for an encoder
  283. MediaFormat newFormat= encoder.getOutputFormat();
  284. if(VERBOSE)Log.d(TAG,"encoder output format changed: "+ newFormat);
  285. }elseif(encoderStatus<0){
  286. fail("unexpected result from encoder.dequeueOutputBuffer: "+ encoderStatus);
  287. }else{// encoderStatus >= 0
  288. ByteBuffer encodedData= encoderOutputBuffers[encoderStatus];
  289. if(encodedData==null){
  290. fail("encoderOutputBuffer "+ encoderStatus+" was null");
  291. }
  292. // Codec config flag must be set iff this is the first chunk of output. This
  293. // may not hold for all codecs, but it appears to be the case for video/avc.
  294. assertTrue((info.flags&MediaCodec.BUFFER_FLAG_CODEC_CONFIG)!=0||
  295. outputCount!=0);
  296. if(info.size!=0){
  297. // Adjust the ByteBuffer values to match BufferInfo.
  298. encodedData.position(info.offset);
  299. encodedData.limit(info.offset+ info.size);
  300. output.addChunk(encodedData, info.flags, info.presentationTimeUs);
  301. outputCount++;
  302. }
  303. encoder.releaseOutputBuffer(encoderStatus,false);
  304. if((info.flags&MediaCodec.BUFFER_FLAG_END_OF_STREAM)!=0){
  305. outputDone=true;
  306. break;// out of while
  307. }
  308. }
  309. }
  310. }
  311. // One chunk per frame, plus one for the config data.
  312. assertEquals("Frame count", NUM_FRAMES +1, outputCount);
  313. }
  314. /**
  315. * Generates a frame of data using GL commands.
  316. * <p>
  317. * We have an 8-frame animation sequence that wraps around. It looks like this:
  318. * <pre>
  319. * 0 1 2 3
  320. * 7 6 5 4
  321. * </pre>
  322. * We draw one of the eight rectangles and leave the rest set to the zero-fill color. */
  323. privatevoid generateSurfaceFrame(int frameIndex){
  324. frameIndex%=8;
  325. int startX, startY;
  326. if(frameIndex<4){
  327. // (0,0) is bottom-left in GL
  328. startX= frameIndex*(mWidth/4);
  329. startY= mHeight/2;
  330. }else{
  331. startX=(7- frameIndex)*(mWidth/4);
  332. startY=0;
  333. }
  334. GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
  335. GLES20.glClearColor(TEST_R0/255.0f, TEST_G0/255.0f, TEST_B0/255.0f,1.0f);
  336. GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
  337. GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
  338. GLES20.glScissor(startX, startY, mWidth/4, mHeight/2);
  339. GLES20.glClearColor(TEST_R1/255.0f, TEST_G1/255.0f, TEST_B1/255.0f,1.0f);
  340. GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
  341. }
  342. /**
  343. * Edits a video file, saving the contents to a new file. This involves decoding and
  344. * re-encoding, not to mention conversions between YUV and RGB, and so may be lossy.
  345. * <p>
  346. * If we recognize the decoded format we can do this in Java code using the ByteBuffer[]
  347. * output, but it's not practical to support all OEM formats. By using a SurfaceTexture
  348. * for output and a Surface for input, we can avoid issues with obscure formats and can
  349. * use a fragment shader to do transformations.
  350. */
  351. privateVideoChunks editVideoFile(VideoChunks inputData){
  352. if(VERBOSE)Log.d(TAG,"editVideoFile "+ mWidth+"x"+ mHeight);
  353. VideoChunks outputData=newVideoChunks();
  354. MediaCodec decoder=null;
  355. MediaCodec encoder=null;
  356. InputSurface inputSurface=null;
  357. OutputSurface outputSurface=null;
  358. try{
  359. MediaFormat inputFormat= inputData.getMediaFormat();
  360. // Create an encoder format that matches the input format. (Might be able to just
  361. // re-use the format used to generate the video, since we want it to be the same.)
  362. MediaFormat outputFormat=MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
  363. outputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
  364. MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
  365. outputFormat.setInteger(MediaFormat.KEY_BIT_RATE,
  366. inputFormat.getInteger(MediaFormat.KEY_BIT_RATE));
  367. outputFormat.setInteger(MediaFormat.KEY_FRAME_RATE,
  368. inputFormat.getInteger(MediaFormat.KEY_FRAME_RATE));
  369. outputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL,
  370. inputFormat.getInteger(MediaFormat.KEY_I_FRAME_INTERVAL));
  371. outputData.setMediaFormat(outputFormat);
  372. encoder=MediaCodec.createEncoderByType(MIME_TYPE);
  373. encoder.configure(outputFormat,null,null,MediaCodec.CONFIGURE_FLAG_ENCODE);
  374. inputSurface=newInputSurface(encoder.createInputSurface());
  375. inputSurface.makeCurrent();
  376. encoder.start();
  377. // OutputSurface uses the EGL context created by InputSurface.
  378. decoder=MediaCodec.createDecoderByType(MIME_TYPE);
  379. outputSurface=newOutputSurface();
  380. outputSurface.changeFragmentShader(FRAGMENT_SHADER);
  381. decoder.configure(inputFormat, outputSurface.getSurface(),null,0);
  382. decoder.start();
  383. editVideoData(inputData, decoder, outputSurface, inputSurface, encoder, outputData);
  384. }finally{
  385. if(VERBOSE)Log.d(TAG,"shutting down encoder, decoder");
  386. if(outputSurface!=null){
  387. outputSurface.release();
  388. }
  389. if(inputSurface!=null){
  390. inputSurface.release();
  391. }
  392. if(encoder!=null){
  393. encoder.stop();
  394. encoder.release();
  395. }
  396. if(decoder!=null){
  397. decoder.stop();
  398. decoder.release();
  399. }
  400. }
  401. return outputData;
  402. }
  403. /**
  404. * Edits a stream of video data.
  405. */
  406. privatevoid editVideoData(VideoChunks inputData,MediaCodec decoder,
  407. OutputSurface outputSurface,InputSurface inputSurface,MediaCodec encoder,
  408. VideoChunks outputData){
  409. finalint TIMEOUT_USEC=10000;
  410. ByteBuffer[] decoderInputBuffers= decoder.getInputBuffers();
  411. ByteBuffer[] encoderOutputBuffers= encoder.getOutputBuffers();
  412. MediaCodec.BufferInfo info =newMediaCodec.BufferInfo();
  413. int inputChunk=0;
  414. int outputCount=0;
  415. boolean outputDone=false;
  416. boolean inputDone=false;
  417. boolean decoderDone=false;
  418. while(!outputDone){
  419. if(VERBOSE)Log.d(TAG,"edit loop");
  420. // Feed more data to the decoder.
  421. if(!inputDone){
  422. int inputBufIndex= decoder.dequeueInputBuffer(TIMEOUT_USEC);
  423. if(inputBufIndex>=0){
  424. if(inputChunk== inputData.getNumChunks()){
  425. // End of stream -- send empty frame with EOS flag set.
  426. decoder.queueInputBuffer(inputBufIndex,0,0,0L,
  427. MediaCodec.BUFFER_FLAG_END_OF_STREAM);
  428. inputDone=true;
  429. if(VERBOSE)Log.d(TAG,"sent input EOS (with zero-length frame)");
  430. }else{
  431. // Copy a chunk of input to the decoder. The first chunk should have
  432. // the BUFFER_FLAG_CODEC_CONFIG flag set.
  433. ByteBuffer inputBuf= decoderInputBuffers[inputBufIndex];
  434. inputBuf.clear();
  435. inputData.getChunkData(inputChunk, inputBuf);
  436. int flags= inputData.getChunkFlags(inputChunk);
  437. long time= inputData.getChunkTime(inputChunk);
  438. decoder.queueInputBuffer(inputBufIndex,0, inputBuf.position(),
  439. time, flags);
  440. if(VERBOSE){
  441. Log.d(TAG,"submitted frame "+ inputChunk+" to dec, size="+
  442. inputBuf.position()+" flags="+ flags);
  443. }
  444. inputChunk++;
  445. }
  446. }else{
  447. if(VERBOSE)Log.d(TAG,"input buffer not available");
  448. }
  449. }
  450. // Assume output is available. Loop until both assumptions are false.
  451. boolean decoderOutputAvailable=!decoderDone;
  452. boolean encoderOutputAvailable=true;
  453. while(decoderOutputAvailable|| encoderOutputAvailable){
  454. // Start by draining any pending output from the encoder. It's important to
  455. // do this before we try to stuff any more data in.
  456. int encoderStatus= encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
  457. if(encoderStatus==MediaCodec.INFO_TRY_AGAIN_LATER){
  458. // no output available yet
  459. if(VERBOSE)Log.d(TAG,"no output from encoder available");
  460. encoderOutputAvailable=false;
  461. }elseif(encoderStatus==MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED){
  462. encoderOutputBuffers= encoder.getOutputBuffers();
  463. if(VERBOSE)Log.d(TAG,"encoder output buffers changed");
  464. }elseif(encoderStatus==MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
  465. MediaFormat newFormat= encoder.getOutputFormat();
  466. if(VERBOSE)Log.d(TAG,"encoder output format changed: "+ newFormat);
  467. }elseif(encoderStatus<0){
  468. fail("unexpected result from encoder.dequeueOutputBuffer: "+ encoderStatus);
  469. }else{// encoderStatus >= 0
  470. ByteBuffer encodedData= encoderOutputBuffers[encoderStatus];
  471. if(encodedData==null){
  472. fail("encoderOutputBuffer "+ encoderStatus+" was null");
  473. }
  474. // Write the data to the output "file".
  475. if(info.size!=0){
  476. encodedData.position(info.offset);
  477. encodedData.limit(info.offset+ info.size);
  478. outputData.addChunk(encodedData, info.flags, info.presentationTimeUs);
  479. outputCount++;
  480. if(VERBOSE)Log.d(TAG,"encoder output "+ info.size+" bytes");
  481. }
  482. outputDone=(info.flags&MediaCodec.BUFFER_FLAG_END_OF_STREAM)!=0;
  483. encoder.releaseOutputBuffer(encoderStatus,false);
  484. }
  485. if(encoderStatus!=MediaCodec.INFO_TRY_AGAIN_LATER){
  486. // Continue attempts to drain output.
  487. continue;
  488. }
  489. // Encoder is drained, check to see if we've got a new frame of output from
  490. // the decoder. (The output is going to a Surface, rather than a ByteBuffer,
  491. // but we still get information through BufferInfo.)
  492. if(!decoderDone){
  493. int decoderStatus= decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
  494. if(decoderStatus==MediaCodec.INFO_TRY_AGAIN_LATER){
  495. // no output available yet
  496. if(VERBOSE)Log.d(TAG,"no output from decoder available");
  497. decoderOutputAvailable=false;
  498. }elseif(decoderStatus==MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED){
  499. //decoderOutputBuffers = decoder.getOutputBuffers();
  500. if(VERBOSE)Log.d(TAG,"decoder output buffers changed (we don't care)");
  501. }elseif(decoderStatus==MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
  502. // expected before first buffer of data
  503. MediaFormat newFormat= decoder.getOutputFormat();
  504. if(VERBOSE)Log.d(TAG,"decoder output format changed: "+ newFormat);
  505. }elseif(decoderStatus<0){
  506. fail("unexpected result from decoder.dequeueOutputBuffer: "+decoderStatus);
  507. }else{// decoderStatus >= 0
  508. if(VERBOSE)Log.d(TAG,"surface decoder given buffer "
  509. + decoderStatus+" (size="+ info.size+")");
  510. // The ByteBuffers are null references, but we still get a nonzero
  511. // size for the decoded data.
  512. boolean doRender=(info.size!=0);
  513. // As soon as we call releaseOutputBuffer, the buffer will be forwarded
  514. // to SurfaceTexture to convert to a texture. The API doesn't
  515. // guarantee that the texture will be available before the call
  516. // returns, so we need to wait for the onFrameAvailable callback to
  517. // fire. If we don't wait, we risk rendering from the previous frame.
  518. decoder.releaseOutputBuffer(decoderStatus, doRender);
  519. if(doRender){
  520. // This waits for the image and renders it after it arrives.
  521. if(VERBOSE)Log.d(TAG,"awaiting frame");
  522. outputSurface.awaitNewImage();
  523. outputSurface.drawImage();
  524. // Send it to the encoder.
  525. inputSurface.setPresentationTime(info.presentationTimeUs*1000);
  526. if(VERBOSE)Log.d(TAG,"swapBuffers");
  527. inputSurface.swapBuffers();
  528. }
  529. if((info.flags&MediaCodec.BUFFER_FLAG_END_OF_STREAM)!=0){
  530. // forward decoder EOS to encoder
  531. if(VERBOSE)Log.d(TAG,"signaling input EOS");
  532. if(WORK_AROUND_BUGS){
  533. // Bail early, possibly dropping a frame.
  534. return;
  535. }else{
  536. encoder.signalEndOfInputStream();
  537. }
  538. }
  539. }
  540. }
  541. }
  542. }
  543. if(inputChunk!= outputCount){
  544. thrownewRuntimeException("frame lost: "+ inputChunk+" in, "+
  545. outputCount+" out");
  546. }
  547. }
  548. /**
  549. * Checks the video file to see if the contents match our expectations. We decode the
  550. * video to a Surface and check the pixels with GL.
  551. */
  552. privatevoid checkVideoFile(VideoChunks inputData){
  553. OutputSurface surface=null;
  554. MediaCodec decoder=null;
  555. mLargestColorDelta=-1;
  556. if(VERBOSE)Log.d(TAG,"checkVideoFile");
  557. try{
  558. surface=newOutputSurface(mWidth, mHeight);
  559. MediaFormat format= inputData.getMediaFormat();
  560. decoder=MediaCodec.createDecoderByType(MIME_TYPE);
  561. decoder.configure(format, surface.getSurface(),null,0);
  562. decoder.start();
  563. int badFrames= checkVideoData(inputData, decoder, surface);
  564. if(badFrames!=0){
  565. fail("Found "+ badFrames+" bad frames");
  566. }
  567. }finally{
  568. if(surface!=null){
  569. surface.release();
  570. }
  571. if(decoder!=null){
  572. decoder.stop();
  573. decoder.release();
  574. }
  575. Log.i(TAG,"Largest color delta: "+ mLargestColorDelta);
  576. }
  577. }
  578. /**
  579. * Checks the video data.
  580. *
  581. * @return the number of bad frames
  582. */
  583. privateint checkVideoData(VideoChunks inputData,MediaCodec decoder,OutputSurface surface){
  584. finalint TIMEOUT_USEC=1000;
  585. ByteBuffer[] decoderInputBuffers= decoder.getInputBuffers();
  586. ByteBuffer[] decoderOutputBuffers= decoder.getOutputBuffers();
  587. MediaCodec.BufferInfo info =newMediaCodec.BufferInfo();
  588. int inputChunk=0;
  589. int checkIndex=0;
  590. int badFrames=0;
  591. boolean outputDone=false;
  592. boolean inputDone=false;
  593. while(!outputDone){
  594. if(VERBOSE)Log.d(TAG,"check loop");
  595. // Feed more data to the decoder.
  596. if(!inputDone){
  597. int inputBufIndex= decoder.dequeueInputBuffer(TIMEOUT_USEC);
  598. if(inputBufIndex>=0){
  599. if(inputChunk== inputData.getNumChunks()){
  600. // End of stream -- send empty frame with EOS flag set.
  601. decoder.queueInputBuffer(inputBufIndex,0,0,0L,
  602. MediaCodec.BUFFER_FLAG_END_OF_STREAM);
  603. inputDone=true;
  604. if(VERBOSE)Log.d(TAG,"sent input EOS");
  605. }else{
  606. // Copy a chunk of input to the decoder. The first chunk should have
  607. // the BUFFER_FLAG_CODEC_CONFIG flag set.
  608. ByteBuffer inputBuf= decoderInputBuffers[inputBufIndex];
  609. inputBuf.clear();
  610. inputData.getChunkData(inputChunk, inputBuf);
  611. int flags= inputData.getChunkFlags(inputChunk);
  612. long time= inputData.getChunkTime(inputChunk);
  613. decoder.queueInputBuffer(inputBufIndex,0, inputBuf.position(),
  614. time, flags);
  615. if(VERBOSE){
  616. Log.d(TAG,"submitted frame "+ inputChunk+" to dec, size="+
  617. inputBuf.position()+" flags="+ flags);
  618. }
  619. inputChunk++;
  620. }
  621. }else{
  622. if(VERBOSE)Log.d(TAG,"input buffer not available");
  623. }
  624. }
  625. if(!outputDone){
  626. int decoderStatus= decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
  627. if(decoderStatus==MediaCodec.INFO_TRY_AGAIN_LATER){
  628. // no output available yet
  629. if(VERBOSE)Log.d(TAG,"no output from decoder available");
  630. }elseif(decoderStatus==MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED){
  631. decoderOutputBuffers= decoder.getOutputBuffers();
  632. if(VERBOSE)Log.d(TAG,"decoder output buffers changed");
  633. }elseif(decoderStatus==MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
  634. MediaFormat newFormat= decoder.getOutputFormat();
  635. if(VERBOSE)Log.d(TAG,"decoder output format changed: "+ newFormat);
  636. }elseif(decoderStatus<0){
  637. fail("unexpected result from decoder.dequeueOutputBuffer: "+ decoderStatus);
  638. }else{// decoderStatus >= 0
  639. ByteBuffer decodedData= decoderOutputBuffers[decoderStatus];
  640. if(VERBOSE)Log.d(TAG,"surface decoder given buffer "+ decoderStatus+
  641. " (size="+ info.size+")");
  642. if((info.flags&MediaCodec.BUFFER_FLAG_END_OF_STREAM)!=0){
  643. if(VERBOSE)Log.d(TAG,"output EOS");
  644. outputDone=true;
  645. }
  646. boolean doRender=(info.size!=0);
  647. // As soon as we call releaseOutputBuffer, the buffer will be forwarded
  648. // to SurfaceTexture to convert to a texture. The API doesn't guarantee
  649. // that the texture will be available before the call returns, so we
  650. // need to wait for the onFrameAvailable callback to fire.
  651. decoder.releaseOutputBuffer(decoderStatus, doRender);
  652. if(doRender){
  653. if(VERBOSE)Log.d(TAG,"awaiting frame "+ checkIndex);
  654. assertEquals("Wrong time stamp", computePresentationTime(checkIndex),
  655. info.presentationTimeUs);
  656. surface.awaitNewImage();
  657. surface.drawImage();
  658. if(!checkSurfaceFrame(checkIndex++)){
  659. badFrames++;
  660. }
  661. }
  662. }
  663. }
  664. }
  665. return badFrames;
  666. }
  667. /**
  668. * Checks the frame for correctness, using GL to check RGB values.
  669. *
  670. * @return true if the frame looks good
  671. */
  672. privateboolean checkSurfaceFrame(int frameIndex){
  673. ByteBuffer pixelBuf=ByteBuffer.allocateDirect(4);// TODO - reuse this
  674. boolean frameFailed=false;
  675. for(int i=0; i<8; i++){
  676. // Note the coordinates are inverted on the Y-axis in GL.
  677. int x, y;
  678. if(i<4){
  679. x= i*(mWidth/4)+(mWidth/8);
  680. y=(mHeight*3)/4;
  681. }else{
  682. x=(7- i)*(mWidth/4)+(mWidth/8);
  683. y= mHeight/4;
  684. }
  685. GLES20.glReadPixels(x, y,1,1, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, pixelBuf);
  686. int r= pixelBuf.get(0)&0xff;
  687. int g= pixelBuf.get(1)&0xff;
  688. int b= pixelBuf.get(2)&0xff;
  689. //Log.d(TAG, "GOT(" + frameIndex + "/" + i + "): r=" + r + " g=" + g + " b=" + b);
  690. int expR, expG, expB;
  691. if(i== frameIndex%8){
  692. // colored rect (green/blue swapped)
  693. expR= TEST_R1;
  694. expG= TEST_B1;
  695. expB= TEST_G1;
  696. }else{
  697. // zero background color (green/blue swapped)
  698. expR= TEST_R0;
  699. expG= TEST_B0;
  700. expB= TEST_G0;
  701. }
  702. if(!isColorClose(r, expR)||
  703. !isColorClose(g, expG)||
  704. !isColorClose(b, expB)){
  705. Log.w(TAG,"Bad frame "+ frameIndex+" (rect="+ i+": rgb="+ r+
  706. ","+ g+","+ b+" vs. expected "+ expR+","+ expG+
  707. ","+ expB+")");
  708. frameFailed=true;
  709. }
  710. }
  711. return!frameFailed;
  712. }
  713. /**
  714. * Returns true if the actual color value is close to the expected color value. Updates
  715. * mLargestColorDelta.
  716. */
  717. boolean isColorClose(int actual,int expected){
  718. finalint MAX_DELTA=8;
  719. int delta=Math.abs(actual- expected);
  720. if(delta> mLargestColorDelta){
  721. mLargestColorDelta= delta;
  722. }
  723. return(delta<= MAX_DELTA);
  724. }
  725. /**
  726. * Generates the presentation time for frame N, in microseconds.
  727. */
  728. privatestaticlong computePresentationTime(int frameIndex){
  729. return123+ frameIndex*1000000/ FRAME_RATE;
  730. }
  731. /**
  732. * The elementary stream coming out of the "video/avc" encoder needs to be fed back into
  733. * the decoder one chunk at a time. If we just wrote the data to a file, we would lose
  734. * the information about chunk boundaries. This class stores the encoded data in memory,
  735. * retaining the chunk organization.
  736. */
  737. privatestaticclassVideoChunks{
  738. privateMediaFormat mMediaFormat;
  739. privateArrayList<byte[]> mChunks =newArrayList<byte[]>();
  740. privateArrayList<Integer> mFlags =newArrayList<Integer>();
  741. privateArrayList<Long> mTimes =newArrayList<Long>();
  742. /**
  743. * Sets the MediaFormat, for the benefit of a future decoder.
  744. */
  745. publicvoid setMediaFormat(MediaFormat format){
  746. mMediaFormat= format;
  747. }
  748. /**
  749. * Gets the MediaFormat that was used by the encoder.
  750. */
  751. publicMediaFormat getMediaFormat(){
  752. return mMediaFormat;
  753. }
  754. /**
  755. * Adds a new chunk. Advances buf.position to buf.limit.
  756. */
  757. publicvoid addChunk(ByteBuffer buf,int flags,long time){
  758. byte[] data=newbyte[buf.remaining()];
  759. buf.get(data);
  760. mChunks.add(data);
  761. mFlags.add(flags);
  762. mTimes.add(time);
  763. }
  764. /**
  765. * Returns the number of chunks currently held.
  766. */
  767. publicint getNumChunks(){
  768. return mChunks.size();
  769. }
  770. /**
  771. * Copies the data from chunk N into "dest". Advances dest.position.
  772. */
  773. publicvoid getChunkData(int chunk,ByteBuffer dest){
  774. byte[] data= mChunks.get(chunk);
  775. dest.put(data);
  776. }
  777. /**
  778. * Returns the flags associated with chunk N.
  779. */
  780. publicint getChunkFlags(int chunk){
  781. return mFlags.get(chunk);
  782. }
  783. /**
  784. * Returns the timestamp associated with chunk N.
  785. */
  786. publiclong getChunkTime(int chunk){
  787. return mTimes.get(chunk);
  788. }
  789. /**
  790. * Writes the chunks to a file as a contiguous stream. Useful for debugging.
  791. */
  792. publicvoid saveToFile(File file){
  793. Log.d(TAG,"saving chunk data to file "+ file);
  794. FileOutputStream fos=null;
  795. BufferedOutputStream bos=null;
  796. try{
  797. fos=newFileOutputStream(file);
  798. bos=newBufferedOutputStream(fos);
  799. fos=null;// closing bos will also close fos
  800. int numChunks= getNumChunks();
  801. for(int i=0; i< numChunks; i++){
  802. byte[] chunk= mChunks.get(i);
  803. bos.write(chunk);
  804. }
  805. }catch(IOException ioe){
  806. thrownewRuntimeException(ioe);
  807. }finally{
  808. try{
  809. if(bos!=null){
  810. bos.close();
  811. }
  812. if(fos!=null){
  813. fos.close();
  814. }
  815. }catch(IOException ioe){
  816. thrownewRuntimeException(ioe);
  817. }
  818. }
  819. }
  820. }
  821. }
0 0
原创粉丝点击