//////////////////////////////////////// YUV 420 byte array ////////////////////////////////////////
private byte[] getDataFromImage(Image image, int colorFormat) {
    if (colorFormat != COLOR_FormatI420 && colorFormat != COLOR_FormatNV21) {
        throw new IllegalArgumentException("only support COLOR_FormatI420 and COLOR_FormatNV21");
    }
    if (!isImageFormatSupported(image)) {
        throw new RuntimeException("can't convert Image to byte array, format " + image.getFormat());
    }
    Rect crop = image.getCropRect();
    int format = image.getFormat();
    int width = crop.width();
    int height = crop.height();
    Image.Plane[] planes = image.getPlanes();
    // YUV 4:2:0 is 12 bits per pixel, so the output buffer is width * height * 3 / 2 bytes
    byte[] data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
    byte[] rowData = new byte[planes[0].getRowStride()];
    if (VERBOSE) Log.d(TAG, "get data from " + planes.length + " planes");
    int channelOffset = 0;
    int outputStride = 1;
    for (int i = 0; i < planes.length; i++) {
        switch (i) {
            case 0: // Y plane: goes to the front of the output in both formats
                channelOffset = 0;
                outputStride = 1;
                break;
            case 1: // U plane
                if (colorFormat == COLOR_FormatI420) {
                    channelOffset = width * height;
                    outputStride = 1;
                } else if (colorFormat == COLOR_FormatNV21) {
                    channelOffset = width * height + 1;
                    outputStride = 2;
                }
                break;
            case 2: // V plane
                if (colorFormat == COLOR_FormatI420) {
                    channelOffset = (int) (width * height * 1.25);
                    outputStride = 1;
                } else if (colorFormat == COLOR_FormatNV21) {
                    channelOffset = width * height;
                    outputStride = 2;
                }
                break;
        }
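        // Resulting layouts: I420 is Y (width*height bytes), then U (width*height/4),
        // then V (width*height/4). NV21 is Y followed by an interleaved VU plane, so V
        // lands on the even chroma offsets and U on the odd ones, hence outputStride = 2.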
        ByteBuffer buffer = planes[i].getBuffer();
        int rowStride = planes[i].getRowStride();
        int pixelStride = planes[i].getPixelStride();
        if (VERBOSE) {
            Log.v(TAG, "pixelStride " + pixelStride);
            Log.v(TAG, "rowStride " + rowStride);
            Log.v(TAG, "width " + width);
            Log.v(TAG, "height " + height);
            Log.v(TAG, "buffer size " + buffer.remaining());
        }
        // chroma planes are subsampled by 2 in both dimensions, the luma plane is not
        int shift = (i == 0) ? 0 : 1;
        int w = width >> shift;
        int h = height >> shift;
        buffer.position(rowStride * (crop.top >> shift) + pixelStride * (crop.left >> shift));
        for (int row = 0; row < h; row++) {
            int length;
            if (pixelStride == 1 && outputStride == 1) {
                // packed plane going to a packed destination: copy the whole row at once
                length = w;
                buffer.get(data, channelOffset, length);
                channelOffset += length;
            } else {
                // copy the row into a scratch buffer, then pick out every pixelStride-th byte
                length = (w - 1) * pixelStride + 1;
                buffer.get(rowData, 0, length);
                for (int col = 0; col < w; col++) {
                    data[channelOffset] = rowData[col * pixelStride];
                    channelOffset += outputStride;
                }
            }
            // skip the row padding, except after the last row of the plane
            if (row < h - 1) {
                buffer.position(buffer.position() + rowStride - length);
            }
        }
        if (VERBOSE) Log.v(TAG, "Finished reading data from plane " + i);
    }
    return data;
}
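// Illustrative usage sketch (not from the original source): converting each frame
// delivered by an ImageReader. Assumes the reader was created with
// ImageFormat.YUV_420_888; the method name here is hypothetical.
private void onFrameAvailable(ImageReader reader) {
    Image image = reader.acquireLatestImage();
    if (image == null) return; // no new frame ready yet
    try {
        byte[] nv21 = getDataFromImage(image, COLOR_FormatNV21);
        // hand nv21 off to the encoder here
    } finally {
        image.close(); // always release the Image so the reader can reuse its buffer
    }
}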
//////////////////////////////////////// ARGB to NV21 (YUV420SP) ////////////////////////////////////////
public void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
    List<caulate> th_list = new ArrayList<>();
    // The larger split is, the more threads are started; more threads are not always faster.
    int split = 2; // should be a power of 2
    int dh = Math.max(1, height / split); // band height; guard against height < split
    // Convert with several threads, each one handling a horizontal band of rows.
    for (int i = 0; i < height; i = i + dh) {
        int end = i + dh;
        if (end > height) { end = height; }
        caulate ac = new caulate(yuv420sp, argb, width, height, i, end);
        th_list.add(ac);
    }
    // Wait for every worker to finish; join() blocks instead of busy-waiting on isAlive().
    for (int j = 0; j < th_list.size(); j++) {
        try {
            th_list.get(j).join();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
}
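// Illustrative usage sketch (not part of the original source): converting a
// Bitmap's pixels into an NV21 buffer with encodeYUV420SP(). Assumes width and
// height are even, as required by 4:2:0 subsampling.
private byte[] bitmapToNV21(Bitmap bitmap) {
    int width = bitmap.getWidth();
    int height = bitmap.getHeight();
    int[] argb = new int[width * height];
    bitmap.getPixels(argb, 0, width, 0, 0, width, height);
    byte[] yuv = new byte[width * height * 3 / 2]; // Y plane + interleaved VU plane
    encodeYUV420SP(yuv, argb, width, height);
    return yuv;
}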
private class caulate extends Thread {
    int[] argb;
    byte[] yuv420sp;
    int width;
    int height;
    int jbegin;
    int jend;

    public caulate(byte[] myuv420sp, int[] margb, int mwidth, int mheight, int mjbegin, int mjend) {
        yuv420sp = myuv420sp;
        argb = margb;
        width = mwidth;
        height = mheight;
        jend = mjend;
        jbegin = mjbegin;
        // the worker starts itself as soon as it is constructed
        this.start();
    }
    @Override
    public void run() {
        final int frameSize = width * height;
        int R, G, B, Y, U, V;
        for (int j = jbegin; j < jend; j++) {
            for (int i = 0; i < width; i++) {
                int num = i + j * width;
                // the pixels come from a BMP, which stores BGR, so R and B are swapped here
                B = Color.red(argb[num]);
                G = Color.green(argb[num]);
                R = Color.blue(argb[num]);
                // well-known RGB to YUV (BT.601) fixed-point conversion
                Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
                U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
                V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
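                // Sanity check, easy to verify by hand: for pure white R = G = B = 255,
                // Y = (((66+129+25)*255 + 128) >> 8) + 16 = 235 and U = V = 128, which
                // matches BT.601 limited-range video levels.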
                // NV21 has a plane of Y and an interleaved VU plane, each chroma channel
                // subsampled by a factor of 2: for every 4 Y pixels there are 1 V and 1 U,
                // taken from every other pixel on every other scanline.
                yuv420sp[num] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
                if (j % 2 == 0 && i % 2 == 0) {
                    // uvIndex depends only on (i, j), so bands running on different
                    // threads never write to the same chroma byte
                    int uvIndex = frameSize + (j >> 1) * width + i;
                    yuv420sp[uvIndex] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
                    yuv420sp[uvIndex + 1] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
                }
            }
        }
    }
}
//////////////////////////////////////// screen capture with MediaCodec ////////////////////////////////////////
public class surface_mediacodec_advance extends MediaCodec.Callback {
    private MediaProjection mp;
    private AtomicBoolean mQuit = new AtomicBoolean(false);
    private int mscreendi;
    private VirtualDisplay vp;
    private String save_path = "/sdcard/capture.mp4";
    private check_file_save_finish mcheck_file_save_finish;
    private long lastpresentationTimeUs;
    private int VIDEO_KEY_BIT_RATE = 4000000; // tried 2000000 and 4000000
    private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
    private static final int FRAME_RATE = 30; // 30 fps
    private static final int IFRAME_INTERVAL = 10; // 10 seconds between I-frames
    private static final int TIMEOUT_US = 10000;
    private int CH_TYPE;
    // fields referenced by the constructor below, omitted from the original listing
    private int mwindowWidth;
    private int mwindowHeight;
    private MediaMuxer mMediaMuxer;
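    // Illustrative sketch (an assumption, not the original code): how constants like
    // MIME_TYPE, VIDEO_KEY_BIT_RATE, FRAME_RATE and IFRAME_INTERVAL are typically
    // combined into a MediaFormat for a surface-input H.264 encoder.
    private MediaFormat buildVideoFormat(int width, int height) {
        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
        // surface input: the encoder reads frames from an input Surface, not from buffers
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, VIDEO_KEY_BIT_RATE);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        return format;
    }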
    // the user has already granted permission for the MediaProjection mp
    public surface_mediacodec_advance(int windowWidth, int windowHeight, int sdi, MediaProjection m, int audio_sample_rate, int ch) {
        CH_TYPE = ch;
        // set up the MediaCodec / MediaMuxer pipeline
        mwindowWidth = windowWidth;
        mwindowHeight = windowHeight;
        mp = m;
        mscreendi = sdi;
        try {
            mMediaMuxer = new MediaMuxer(save_path, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);