日韩性视频-久久久蜜桃-www中文字幕-在线中文字幕av-亚洲欧美一区二区三区四区-撸久久-香蕉视频一区-久久无码精品丰满人妻-国产高潮av-激情福利社-日韩av网址大全-国产精品久久999-日本五十路在线-性欧美在线-久久99精品波多结衣一区-男女午夜免费视频-黑人极品ⅴideos精品欧美棵-人人妻人人澡人人爽精品欧美一区-日韩一区在线看-欧美a级在线免费观看

歡迎訪問 生活随笔!

生活随笔

當前位置: 首頁 > 人文社科 > 生活经验 > 内容正文

生活经验

android ffmpeg 编码h264,Mac系统下ffmpeg+h264+flv编码的android录制屏幕实现2

發布時間:2023/11/27 生活经验 30 豆豆

生活随笔 收集整理的這篇文章主要介紹了 android ffmpeg 编码h264,Mac系统下ffmpeg+h264+flv编码的android录制屏幕实现2 小編覺得挺不錯的,現在分享給大家,幫大家做個參考。

接上一篇。

activity_flv.xml

xmlns:tools="http://schemas.android.com/tools"

android:layout_width="match_parent"

android:layout_height="match_parent"

tools:context=".FlvActivity">

android:layout_width="match_parent"

android:layout_height="match_parent"

android:orientation="horizontal">

android:id="@+id/take_button"

android:layout_width="wrap_content"

android:layout_height="match_parent"

android:text="open"/>

android:id="@+id/surfaceView1"

android:layout_width="0dp"

android:layout_height="match_parent"

android:layout_weight="1"/>

最后偷懶貼個圖 build.gradle

build.gradle

忘記上jni文件了,直接走起

/*
 * FFmpeg-based screen recorder JNI backend: encodes NV21 frames to
 * H.264 and muxes them into an FLV file on external storage.
 * Uses the legacy FFmpeg API (av_register_all, avcodec_encode_video2).
 */
#include <stdio.h>

#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/time.h"

#ifdef ANDROID
#include <jni.h>
#include <android/log.h>
/* On Android, route log output to logcat. */
#define LOGE(format, ...)  __android_log_print(ANDROID_LOG_ERROR, "(>_<)", format, ##__VA_ARGS__)
#define LOGI(format, ...)  __android_log_print(ANDROID_LOG_INFO,  "(=_=)", format, ##__VA_ARGS__)
#else
/* Off Android (e.g. desktop testing), fall back to stdout. */
#define LOGE(format, ...)  printf("(>_<) " format "\n", ##__VA_ARGS__)
#define LOGI(format, ...)  printf("(^_^) " format "\n", ##__VA_ARGS__)
#endif

/* Muxer context for the output FLV file (owned by initial()/close()). */
AVFormatContext *ofmt_ctx;
/* Output video stream inside ofmt_ctx. */
AVStream *video_st;
/* H.264 encoder context. */
AVCodecContext *pCodecCtx;
/* H.264 encoder. */
AVCodec *pCodec;
/* Reusable packet for encoded output. */
AVPacket enc_pkt;
/* Frame holding the YUV420P picture handed to the encoder. */
AVFrame *pFrameYUV;
/* Number of frames encoded so far; drives PTS generation. */
int framecnt = 0;
/* Input frame geometry, set in initial(). */
int yuv_width;
int yuv_height;
/* Plane sizes: y_length = w*h bytes (Y), uv_length = w*h/4 (one chroma plane). */
int y_length;
int uv_length;
/* Wall-clock time (us) when muxing started, used to pace writes. */
int64_t start_time;

/*
 * Callback for FFmpeg's av_log(): appends every log line to a file on
 * external storage so it can be inspected on-device.
 * Silently drops the message if the file cannot be opened.
 */
void custom_log(void *ptr, int level, const char *fmt, va_list vl)
{
    FILE *fp = fopen("/storage/emulated/0/av_log.txt", "a+");
    if (fp) {
        vfprintf(fp, fmt, vl);
        fflush(fp);
        fclose(fp);
    }
}

JNIEXPORTjintJNICALLJava_csupport_lyjq_com_csupport_FlvActivity_initial

(JNIEnv*env,jobjectobj,jintwidth,jintheight)

{

const char* out_path ="/storage/emulated/0/testffmpeg.flv";

yuv_width=width;

yuv_height=height;

y_length=width*height;

uv_length=width*height/4;

//FFmpeg av_log() callback

av_log_set_callback(custom_log);

av_register_all();

//output initialize

avformat_alloc_output_context2(&ofmt_ctx,NULL,"flv", out_path);

//output encoder initialize

pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);

if(!pCodec){

LOGE("Can not find encoder!\n");

return-1;

}

pCodecCtx = avcodec_alloc_context3(pCodec);

pCodecCtx->pix_fmt=AV_PIX_FMT_YUV420P;

pCodecCtx->width= width;

pCodecCtx->height= height;

pCodecCtx->time_base.num=1;

pCodecCtx->time_base.den=30;

pCodecCtx->bit_rate=800000;

pCodecCtx->gop_size=300;

/* Some formats want stream headers to be separate. */

if(ofmt_ctx->oformat->flags&AVFMT_GLOBALHEADER)

pCodecCtx->flags|=CODEC_FLAG_GLOBAL_HEADER;

//H264 codec param

//pCodecCtx->me_range = 16;

//pCodecCtx->max_qdiff = 4;

//pCodecCtx->qcompress = 0.6;

pCodecCtx->qmin=10;

pCodecCtx->qmax=51;

//Optional Param

pCodecCtx->max_b_frames=3;

// Set H264 preset and tune

AVDictionary*param =0;

av_dict_set(?m,"preset","ultrafast",0);

av_dict_set(?m,"tune","zerolatency",0);

if(avcodec_open2(pCodecCtx, pCodec, ?m) <0){

LOGE("Failed to open encoder!\n");

return-1;

}

//Add a new stream to output,should be called by the user before avformat_write_header() for muxing

video_st = avformat_new_stream(ofmt_ctx, pCodec);

if(video_st ==NULL){

return-1;

}

video_st->time_base.num=1;

video_st->time_base.den=30;

video_st->codec= pCodecCtx;

//Open output URL,set before avformat_write_header() for muxing

if(avio_open(&ofmt_ctx->pb, out_path,AVIO_FLAG_READ_WRITE) <0){

LOGE("Failed to open output file!\n");

return-1;

}

//Write File Header

avformat_write_header(ofmt_ctx,NULL);

start_time = av_gettime();

return0;

}

JNIEXPORTjintJNICALLJava_csupport_lyjq_com_csupport_FlvActivity_encode

(JNIEnv*env,jobjectobj,jbyteArrayyuv)

{

intret;

intenc_got_frame=0;

inti=0;

pFrameYUV = av_frame_alloc();

uint8_t*out_buffer = (uint8_t*)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));

avpicture_fill((AVPicture*)pFrameYUV, out_buffer,AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);

//��?����?����?NV21��?���?�����?��?YUV420P��?

jbyte* in= (jbyte*)(*env)->GetByteArrayElements(env,yuv,0);

memcpy(pFrameYUV->data[0],in,y_length);

for(i=0;i

{

*(pFrameYUV->data[2]+i)=*(in+y_length+i*2);

*(pFrameYUV->data[1]+i)=*(in+y_length+i*2+1);

}

pFrameYUV->format=AV_PIX_FMT_YUV420P;

pFrameYUV->width= yuv_width;

pFrameYUV->height= yuv_height;

enc_pkt.data=NULL;

enc_pkt.size=0;

av_init_packet(&enc_pkt);

ret = avcodec_encode_video2(pCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);

av_frame_free(&pFrameYUV);

if(enc_got_frame ==1){

LOGI("Succeed to encode frame: %5d\tsize:%5d\n", framecnt, enc_pkt.size);

framecnt++;

enc_pkt.stream_index= video_st->index;

//Write PTS

AVRationaltime_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };

AVRationalr_framerate1 = {60,2};//{ 50, 2 };

AVRationaltime_base_q = {1,AV_TIME_BASE};

//Duration between 2 frames (us)

int64_tcalc_duration = (double)(AV_TIME_BASE)*(1/ av_q2d(r_framerate1));//�?�?���

//Parameters

//enc_pkt.pts = (double)(framecnt*calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));

enc_pkt.pts= av_rescale_q(framecnt*calc_duration, time_base_q, time_base);

enc_pkt.dts= enc_pkt.pts;

enc_pkt.duration= av_rescale_q(calc_duration, time_base_q, time_base);//(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));

enc_pkt.pos= -1;

//Delay

int64_tpts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);

int64_tnow_time = av_gettime() - start_time;

if(pts_time > now_time)

av_usleep(pts_time - now_time);

ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);

av_free_packet(&enc_pkt);

}

return0;

}

JNIEXPORTjintJNICALLJava_csupport_lyjq_com_csupport_FlvActivity_flush

(JNIEnv*env,jobjectobj)

{

intret;

intgot_frame;

AVPacketenc_pkt;

if(!(ofmt_ctx->streams[0]->codec->codec->capabilities&

CODEC_CAP_DELAY))

return0;

while(1) {

enc_pkt.data=NULL;

enc_pkt.size=0;

av_init_packet(&enc_pkt);

ret = avcodec_encode_video2(ofmt_ctx->streams[0]->codec, &enc_pkt,

NULL, &got_frame);

if(ret <0)

break;

if(!got_frame){

ret =0;

break;

}

LOGI("Flush Encoder: Succeed to encode 1 frame!\tsize:%5d\n", enc_pkt.size);

//Write PTS

AVRationaltime_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };

AVRationalr_framerate1 = {60,2};

AVRationaltime_base_q = {1,AV_TIME_BASE};

//Duration between 2 frames (us)

int64_tcalc_duration = (double)(AV_TIME_BASE)*(1/ av_q2d(r_framerate1));//�?�?���

//Parameters

enc_pkt.pts= av_rescale_q(framecnt*calc_duration, time_base_q, time_base);

enc_pkt.dts= enc_pkt.pts;

enc_pkt.duration= av_rescale_q(calc_duration, time_base_q, time_base);

//?��PTS/DTS��Convert PTS/DTS��

enc_pkt.pos= -1;

framecnt++;

ofmt_ctx->duration= enc_pkt.duration* framecnt;

/* mux encoded frame */

ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);

if(ret <0)

break;

}

//Write file trailer

av_write_trailer(ofmt_ctx);

return0;

}

JNIEXPORTjintJNICALLJava_csupport_lyjq_com_csupport_FlvActivity_close

(JNIEnv*env,jobjectobj)

{

if(video_st)

avcodec_close(video_st->codec);

avio_close(ofmt_ctx->pb);

avformat_free_context(ofmt_ctx);

return0;

}

效果圖,生成的flv文件在sd卡根目錄下

Happy ending

總結

以上是生活随笔為你收集整理的android ffmpeg 编码h264,Mac系统下ffmpeg+h264+flv编码的android录制屏幕实现2的全部內(nèi)容,希望文章能夠幫你解決所遇到的問題。

如果覺得生活随笔網站內容還不錯,歡迎將生活随笔推薦給好友。