@@ -21,6 +21,7 @@
 import org.webrtc.VideoFrame;
 import org.webrtc.VideoFrameDrawer;
 import org.webrtc.VideoSink;
+import org.webrtc.YuvHelper;
 
 import java.nio.ByteBuffer;
 
@@ -121,7 +122,7 @@ public int[] uploadYuvData(int width, int height, int[] strides, ByteBuffer[] planes)
     }
     // Allocate copy buffer if necessary.
     if (copyCapacityNeeded > 0
-        && (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
+        && (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
       copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded);
     }
     // Make sure YUV textures are allocated.
@@ -141,11 +142,12 @@ public int[] uploadYuvData(int width, int height, int[] strides, ByteBuffer[] planes)
         // Input is packed already.
         packedByteBuffer = planes[i];
       } else {
-        Log.e(TAG, "Unpacked YUV buffer found");
-        throw new RuntimeException();
+        YuvHelper.copyPlane(
+            planes[i], strides[i], copyBuffer, planeWidths[i], planeWidths[i], planeHeights[i]);
+        packedByteBuffer = copyBuffer;
       }
       GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidths[i],
-          planeHeights[i], 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
+          planeHeights[i], 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
     }
     return yuvTextures;
   }
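For reference, the YuvHelper.copyPlane() call introduced above appears to repack a plane whose rows are strides[i] bytes apart into the shared copyBuffer, using the packed row width planeWidths[i] as both the destination stride and the copy width, since the glTexImage2D upload below it expects rows packed back to back. A minimal sketch of that repacking in plain java.nio terms follows; the class and method names are illustrative only and not part of this change:

import java.nio.ByteBuffer;

// Illustrative only (not part of the patch): copies a strided image plane into a
// tightly packed destination, row by row. Conceptually this is what the
// YuvHelper.copyPlane() call in the diff achieves before the glTexImage2D upload.
final class PackedPlaneCopy {
  static void copyPlane(ByteBuffer src, int srcStride, ByteBuffer dst, int width, int height) {
    final byte[] row = new byte[width];
    for (int y = 0; y < height; y++) {
      src.position(y * srcStride); // rows in the source are srcStride bytes apart
      src.get(row, 0, width);      // keep only the visible `width` bytes of the row
      dst.put(row);                // destination rows are packed back to back
    }
    dst.rewind(); // rewind so the packed buffer can be read from position 0 by the uploader
  }
}

The sketch assumes dst starts at position 0 and has capacity of at least width * height bytes, in line with how copyBuffer is allocated earlier in the method.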