基于WebAssembly无插件解码H264/H265码流播放器
之前看到一篇文章:web无插件解码播放H264/H265(WebAssembly解码HTML5播放)
H.265/HEVC在Web视频播放的实践
这里写目录标题
按照文章思路,已经复现了web端无插件解码H265码流。首先说明下我这边的环境,框架。
在海思主板上移植了web服务器nginx,用于pc端请求网页资源,然后用了两个线程,一个线程海思编码,一个线程websocket协议服务器推送H265码流到web端,web端收到了码流数据,调用ffmpeg编译好的js库软解码H265数据,webgl渲染yuv数据。
一.emsdk工具的安装
https://www.cnblogs.com/tonghaolang/p/9253719.html Windows下安装Emscripten
Windows 和Linux 安装 emsdk
https://blog.csdn.net/qq_34754747/article/details/103815349?depth_1-utm_source=distribute.pc_relevant.none-task-blog-BlogCommendFromBaidu-11&utm_source=distribute.pc_relevant.none-task-blog-BlogCommendFromBaidu-11
安装emsdk工具
wget https://s3.amazonaws.com/mozilla-games/emscripten/releases/emsdk-portable.tar.gz
二.emsdk编译ffmpeg
下载ffmpeg源码,用下面命令在linux平台编译得到ffmpeg静态库文件。
emconfigure ./configure --cc="emcc" --prefix=/opt/tar-download/emccffmpeg \
--enable-cross-compile --target-os=none --arch=x86_32 --cpu=generic --disable-ffplay --disable-ffprobe \
--disable-asm --disable-doc --disable-devices --disable-pthreads --disable-w32threads --disable-network \
--disable-hwaccels --disable-parsers --disable-bsfs --disable-debug --disable-protocols --disable-indevs \
--disable-outdevs --enable-protocol=file --enable-decoder=hevc --enable-decoder=h264
三.ffmpeg解码库
ffmpeg.h
#ifndef_FFMPEG_H_#define_FFMPEG_H_#include<stdio.h>#include<stdlib.h>#include<string.h>#include<unistd.h>#include<sys/stat.h>#include<sys/types.h>#include<fcntl.h>#include"libswscale/swscale.h"#include"libavutil/opt.h"#include"libavutil/imgutils.h"#include"libavcodec/avcodec.h"#defineQUEUE_SIZE20//图片数据typedefstruct_imgdata{int width;int height;uint8_t* data;}Imgdata;//解码APIexternintinit_decoder(int codeid);//初始化解码器extern Imgdata*decoder_raw(char* framedata,int framesize );//解码externuint8_t*process(AVFrame* frame,uint8_t* buff);//yuvdata 保存在buff中externvoidfree_decoder();//关闭解码器#endif
ffmpeg.c
#include"ffmpeg.h"
AVCodec* codec;// selected decoder descriptor (H.264 or H.265), set by init_decoder
AVCodecParserContext* parser =NULL;// splits the raw byte stream into whole packets
AVCodecContext* c =NULL;// codec context describing the stream being decoded
AVPacket* pkt =NULL;// compressed packet handed to the decoder
AVFrame* frame =NULL;// holds one decoded video frame
Imgdata *imgbuff =NULL;staticint codeId =0;/**********************************************************
* 功能: 初始化解码器
* 参数: codeid---> 1=打开h.265解码器
* 0=打开h.264解码器
*
* 返回值:0=成功初始化
* -1=初始化失败
*
* *******************************************************/intinit_decoder(int codeid){
codeId = codeid;switch(codeid){case0:{
codec =avcodec_find_decoder(AV_CODEC_ID_H264);break;}case1:{
codec =avcodec_find_decoder(AV_CODEC_ID_H265);break;}default:{printf("In C language no choose h.264/h.265 decoder\n");return-1;}}if(NULL== codec){printf("codec find decoder failed\n");return-1;}
parser =av_parser_init(codec->id);if(NULL== parser){printf("can't jsparser init!\n");return-1;}
c =avcodec_alloc_context3(codec);if(NULL== c){printf("can't allocate video jscodec context!\n");return-1;}if(avcodec_open2(c, codec,NULL)<0){printf("can't open codec\n");return-1;}
frame =av_frame_alloc();if(NULL== frame){printf("can't allocate video frame\n");return-1;}
pkt =av_packet_alloc();if(NULL== pkt){printf("can't allocate video packet\n");return-1;}
imgbuff =(Imgdata*)malloc(sizeof(Imgdata));if(NULL== imgbuff){printf("malloc imgbuff failed\n");return-1;}printf("In C language init decoder success!\n");return0;}/**********************************************************
* 功能: 解码,将h.265/h.264裸流解码为yuv420p数据
* 参数: framedata---->裸流数据
* framesize---->裸流长度
*
* 返回值:Imgdata的结构体 Imgdata->width 视频宽度
* Imgdata->height 视频高度
* Imgdata->data yuv420p数据
*
* *******************************************************/
Imgdata*decoder_raw(char* framedata,int framesize ){if(NULL== framedata || framesize <=0){printf("In C language framedata is null or framesize is zero\n");returnNULL;}/*****************************************************
* 过滤,第一次检测首帧是否是VPS/SPS,若不是跳过不解码
第一次检测到首帧是VPS/SPS,后面不用检测,flag=0 --> flag=1
*
* **************************************************/int nut =0;staticint flag =0;if(codeId ==0)//裸流h.264{
nut =(char)framedata[4]&0x1f;while(!flag){if(7==nut ){//PPS
flag =1;break;}elseif(8==nut )//PPS{returnNULL;}elseif(5==nut )//I{returnNULL;}elseif(1==nut )//P{returnNULL;}elseif(9==nut )//AUD{returnNULL;}elseif(6==nut )//SEI{returnNULL;}else//Unknown{returnNULL;}}}else{//h.265裸流
nut =(char)(framedata[4]>>1)&0x3f;while(!flag){if(nut ==32)//VPS {
flag =1;break;}elseif(nut ==33)//SPS{returnNULL;}elseif(nut ==34)//PPS{returnNULL;}elseif(nut ==19)//I帧{returnNULL;}elseif(nut ==1)//P帧{returnNULL;}elseif(nut ==35)//AUD{returnNULL;}elseif(nut ==39)//SEI{returnNULL;}else//Unknown {returnNULL;}}}//开始解码int ret =0;int size =0;while(framesize >0){
size =av_parser_parse2(parser, c,&pkt->data,&pkt->size,(constuint8_t*)framedata, framesize,
AV_NOPTS_VALUE, AV_NOPTS_VALUE, AV_NOPTS_VALUE);if(size <0){printf("Error while parsing\n");returnNULL;}
framedata += size;
framesize -= size;if(pkt->size){
ret =avcodec_send_packet(c, pkt);if(ret <0){printf("In C language error sending a packet for decoding\n");returnNULL;}else{while(ret >=0){
ret =avcodec_receive_frame(c, frame);if(ret ==AVERROR(EAGAIN)|| ret == AVERROR_EOF){break;}elseif(ret <0){printf("Error during decoding\n");returnNULL;}//yuv数据处理,保存在buff中,在网页中调用yuv420p数据交给webgl渲染int yuvBytes =av_image_get_buffer_size(AV_PIX_FMT_YUV420P, frame->width, frame->height,1);uint8_t*buff =(uint8_t*)malloc(yuvBytes);if(buff ==NULL){printf("malloc yuvBytes failed!\n");returnNULL;}
imgbuff->width = frame->width;
imgbuff->height = frame->height;
imgbuff->data =process(frame, buff);free(buff);}}}}return imgbuff;}/******************************************************
* 功能: 将一帧yuvdata 保存为yuv buff
* 参数: frame---->yuvframe
* buff----->保存数据buff
*
* 返回值: buff yuv420p数据
* ***************************************************/uint8_t*process(AVFrame* frame,uint8_t* buff){int i, j, k;for(i=0; i<frame->height; i++){memcpy(buff + frame->width*i, frame->data[0]+frame->linesize[0]*i, frame->width);}for(j=0; j<frame->height/2; j++){memcpy(buff + frame->width*i + frame->width/2*j, frame->data[1]+ frame->linesize[1]*j, frame->width/2);}for(k=0; k<frame->height/2; k++){memcpy(buff + frame->width*i +frame->width/2*j + frame->width/2*k, frame->data[2]+frame->linesize[2]*k, frame->width/2);}return buff;}/*****************************************************
* 功能:关闭解码器,释放内存
* 参数:
*
* 返回值:无
* **************************************************/voidfree_decoder(){av_parser_close(parser);avcodec_free_context(&c);av_frame_free(&frame);av_packet_free(&pkt);if(imgbuff !=NULL){if(imgbuff->data !=NULL){free(imgbuff->data);
imgbuff->data =NULL;}free(imgbuff);
imgbuff =NULL;}printf("In C language free all requested memory! \n");}
test.c
#include<stdio.h>#include"ffmpeg.h"intmain(int argc,constchar**argv){return0;}
四.执行Makefile.sh脚本,生成ffmpeg.js ffmpeg.wasm解码库
Makefile.sh
# Build the WebAssembly decoder: compile test.c + ffmpeg.c against the
# Emscripten-built FFmpeg static libraries and emit libffmpeg.js/.wasm.
echo "start building"
# Linear memory for the wasm module: 64 MiB.
export TOTAL_MEMORY=67108864
# C symbols exported to JavaScript (note the mandatory leading underscore).
export EXPORTED_FUNCTIONS="[ \
'_init_decoder', \
'_decoder_raw', \
'_free_decoder', \
'_main']"
# -s WASM=1: emit wasm; EXTRA_EXPORTED_RUNTIME_METHODS exposes addFunction;
# FORCE_FILESYSTEM keeps MEMFS available to the decoder.
# NOTE(review): USE_PTHREADS=1 while FFmpeg was configured with
# --disable-pthreads looks unnecessary — confirm it is really needed.
emcc test.c ffmpeg.c ../lib/libavcodec.a ../lib/libavutil.a ../lib/libswscale.a \
-O2 \
-I "../inc" \
-s WASM=1 \
-s USE_PTHREADS=1 \
-s TOTAL_MEMORY=${TOTAL_MEMORY} \
-s EXPORTED_FUNCTIONS="${EXPORTED_FUNCTIONS}" \
-s EXTRA_EXPORTED_RUNTIME_METHODS="['addFunction']" \
-s RESERVED_FUNCTION_POINTERS=14 \
-s FORCE_FILESYSTEM=1 \
-o ../js/libffmpeg.js
echo "finish building"
五.实现websocket协议服务器,推送码流
参考文章:WebSocket的C++服务器端实现
WebSocket协议解析
/*******************************************************************************
* 名称: server_thread_fun
* 功能: 线程管理websocket服务器
* 形参: arg: WebSocket_server 结构体
*
*
* 返回: 无
* 说明: 无
******************************************************************************/voidserver_thread_fun(void*arg){int ret, j;int cfd;char ip[16]="";structsockaddr_in serverAddr;structsockaddr_in clientAddr;socklen_t client_len =sizeof(clientAddr);
WebSocket_server *wss =(WebSocket_server *)arg;memset(&serverAddr,0,sizeof(serverAddr));
serverAddr.sin_family = AF_INET;
serverAddr.sin_port =htons(wss->port);
serverAddr.sin_addr.s_addr = INADDR_ANY;
wss->fd =socket(AF_INET, SOCK_STREAM,0);if(wss->fd <0){printf("server can't create socket\n");exit(1);}//设置端口复用,绑定监听int opt =1;setsockopt(wss->fd, SOL_SOCKET, SO_REUSEADDR,&opt,sizeof(opt));bind(wss->fd,(structsockaddr*)&serverAddr,sizeof(serverAddr));listen(wss->fd,128);//创建一个句柄int epfd =epoll_create(1);if(epfd <0){perror("");exit(1);}structepoll_event ev, evs[1024];
ev.data.fd = wss->fd;
ev.events = EPOLLIN | EPOLLET;epoll_ctl(epfd, EPOLL_CTL_ADD, wss->fd,&ev);while(1){int n =epoll_wait(epfd, evs,1024,-1);//epoll监听if( n <0){printf("server start epoll_wait failed\n");exit(1);}for(j=0; j<n; j++){if((evs[j].events & EPOLLERR)||(evs[j].events & EPOLLHUP))//epoll连接错误{//printf("epoll error accept close:%d \n",evs[j].data.fd);
ev.data.fd = evs[j].data.fd;if(epoll_ctl(epfd, EPOLL_CTL_DEL, evs[j].data.fd,&ev)<0){printf("server epoll_ctl:EPOLL_CTL_DEL failed\n");close(epfd);exit(1);}arrayRemoveItem(wss->client_fd_array, EPOLL_RESPOND_NUM, evs[j].data.fd);//client close 从数组中移除fdclose(evs[j].data.fd);}elseif(evs[j].data.fd == wss->fd)//接入客户端{
cfd =accept(wss->fd,(structsockaddr*)&clientAddr,&client_len);//提取新的连接if(cfd >=0){
ev.data.fd = cfd;epoll_ctl(epfd, EPOLL_CTL_ADD, cfd,&ev);}//printf("websocketV2 client ip=%s port=%d fd=%d\n",inet_ntop(AF_INET, &clientAddr.sin_addr.s_addr, ip, 16),ntohs(clientAddr.sin_port),cfd);}elseif(evs[j].events & EPOLLIN)//接收数据事件{memset(wss->buf,0,sizeof(wss->buf));
ret = wss->callback(wss, evs[j].data.fd, wss->buf,sizeof(wss->buf));//回调函数if(ret <=0)//ret<0时,检查异常,决定是否主动解除连接{if( errno >0&&(errno == EAGAIN || errno == EINTR));else{//printf("accept close=%d errno =%d\n",evs[j].data.fd,errno);
ev.data.fd = evs[j].data.fd;if(epoll_ctl(epfd, EPOLL_CTL_DEL, evs[j].data.fd,&ev)<0){printf("server epoll_ctl:EPOLL_CTL_DEL failed\n");close(epfd);exit(1);}arrayRemoveItem(wss->client_fd_array, EPOLL_RESPOND_NUM, evs[j].data.fd);//client close 从数组中移除fdclose(evs[j].data.fd);}}}elseif(evs[j].events & EPOLLOUT)//发送数据事件;}}close(epfd);//关闭句柄close(wss->fd);}/*******************************************************************************
/*******************************************************************************
 * Name  : server_callback  (the original comment wrongly said server_thread_fun)
 * Role  : per-fd receive callback: completes the websocket handshake when a
 *         plain-HTTP "GET" upgrade request arrives, otherwise consumes frames.
 * Param : wss    -> WebSocket_server structure
 *         fd     -> client socket
 *         buf    -> receive buffer
 *         buflen -> buffer size
 * Return: result of webSocket_recv / handshake; <= 0 means error or
 *         not-yet-established
 ******************************************************************************/
int ret1 = 0;   /* NOTE(review): file-scope result holder — not thread-safe */
int server_callback(WebSocket_server *wss, int fd, char *buf, unsigned int buflen)
{
    ret1 = webSocket_recv(fd, buf, buflen, NULL);   /* receive client data */
    if (ret1 > 0) {
#if 0
        printf(" client->server recv msg fd=%d mgs=%s\n", fd, buf);
        if (strstr(buf, "hi~") != NULL)
            ret1 = webSocket_send(fd, "I am server", strlen("I am server"), false, WDT_TXTDATA);
#endif
    } else if (ret1 < 0) {
        /* Not a websocket frame: if it is an HTTP GET, run the handshake. */
        if (strncmp(buf, "GET", 3) == 0) {
            ret1 = webSocket_serverLinkToClient(fd, buf, ret1);
            if (ret1 > 0) {
                /* Handshake succeeded: remember this client fd. */
                arrayAddItem(wss->client_fd_array, EPOLL_RESPOND_NUM, fd);
            }
        }
    }
    return ret1;
}
/*******************************************************************************
 * Name  : arrayAddItem
 * Role  : store a client fd in the first free slot of the fd table.
 * Param : array     -> table of [fd, in-use] pairs
 *         arraySize -> number of slots
 *         value     -> client fd to store
 * Return: 0 on success, -1 when the table is full
 ******************************************************************************/
int arrayAddItem(int array[][2], int arraySize, int value)
{
    int slot;
    for (slot = 0; slot < arraySize; slot++) {
        if (array[slot][1] != 0)
            continue;               /* slot occupied */
        array[slot][0] = value;
        array[slot][1] = 1;         /* mark in use */
        return 0;
    }
    return -1;                      /* no free slot */
}
/*******************************************************************************
 * Name  : arrayRemoveItem
 * Role  : remove a client fd from the fd table (first match only).
 * Param : array     -> table of [fd, in-use] pairs
 *         arraySize -> number of slots
 *         value     -> client fd to remove
 * Return: 0 on success, -1 when the fd is not present
 ******************************************************************************/
int arrayRemoveItem(int array[][2], int arraySize, int value)
{
    int slot;
    for (slot = 0; slot < arraySize; slot++) {
        if (array[slot][0] != value)
            continue;
        array[slot][0] = 0;         /* clear the fd */
        array[slot][1] = 0;         /* mark the slot free */
        return 0;
    }
    return -1;                      /* fd not found */
}
六.web端实现
webgl.js
/* webgl.js — renders packed yuv420p frames via three LUMINANCE textures
 * and a YUV->RGB fragment shader. */

/* One GL texture with linear filtering and edge clamping. */
function Texture(gl) {
    this.gl = gl;
    this.texture = gl.createTexture();
    gl.bindTexture(gl.TEXTURE_2D, this.texture);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
}

/* Bind this texture to texture unit n and the named sampler uniform. */
Texture.prototype.bind = function (n, program, name) {
    var gl = this.gl;
    gl.activeTexture([gl.TEXTURE0, gl.TEXTURE1, gl.TEXTURE2][n]);
    gl.bindTexture(gl.TEXTURE_2D, this.texture);
    gl.uniform1i(gl.getUniformLocation(program, name), n);
};

/* Upload one plane (single-channel bytes) into the texture. */
Texture.prototype.fill = function (width, height, data) {
    var gl = this.gl;
    gl.bindTexture(gl.TEXTURE_2D, this.texture);
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, width, height, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, data);
};

function WebGLPlayer(canvas, options) {
    this.canvas = canvas;
    this.gl = canvas.getContext("webgl") || canvas.getContext("experimental-webgl");
    this.initGL(options);
}

/* Compile the YUV shader program and set up the fullscreen quad. */
WebGLPlayer.prototype.initGL = function (options) {
    if (!this.gl) {
        console.log("[ER] WebGL not supported.");
        return;
    }
    var gl = this.gl;
    gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);  // planes have no row padding
    var program = gl.createProgram();
    var vertexShaderSource = [
        "attribute highp vec4 aVertexPosition;",
        "attribute vec2 aTextureCoord;",
        "varying highp vec2 vTextureCoord;",
        "void main(void) {",
        " gl_Position = aVertexPosition;",
        " vTextureCoord = aTextureCoord;",
        "}"
    ].join("\n");
    var vertexShader = gl.createShader(gl.VERTEX_SHADER);
    gl.shaderSource(vertexShader, vertexShaderSource);
    gl.compileShader(vertexShader);
    // BUG FIX: shader compile errors were silent (only link status was checked).
    if (!gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS)) {
        console.log("[ER] Vertex shader compile failed: " + gl.getShaderInfoLog(vertexShader));
    }
    var fragmentShaderSource = [
        "precision highp float;",
        "varying lowp vec2 vTextureCoord;",
        "uniform sampler2D YTexture;",
        "uniform sampler2D UTexture;",
        "uniform sampler2D VTexture;",
        "const mat4 YUV2RGB = mat4",
        "(",
        " 1.1643828125, 0, 1.59602734375, -.87078515625,",
        " 1.1643828125, -.39176171875, -.81296875, .52959375,",
        " 1.1643828125, 2.017234375, 0, -1.081390625,",
        " 0, 0, 0, 1",
        ");",
        "void main(void) {",
        " gl_FragColor = vec4( texture2D(YTexture, vTextureCoord).x, texture2D(UTexture, vTextureCoord).x, texture2D(VTexture, vTextureCoord).x, 1) * YUV2RGB;",
        "}"
    ].join("\n");
    var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
    gl.shaderSource(fragmentShader, fragmentShaderSource);
    gl.compileShader(fragmentShader);
    if (!gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS)) {
        console.log("[ER] Fragment shader compile failed: " + gl.getShaderInfoLog(fragmentShader));
    }
    gl.attachShader(program, vertexShader);
    gl.attachShader(program, fragmentShader);
    gl.linkProgram(program);
    gl.useProgram(program);
    if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
        console.log("[ER] Shader link failed.");
    }
    var vertexPositionAttribute = gl.getAttribLocation(program, "aVertexPosition");
    gl.enableVertexAttribArray(vertexPositionAttribute);
    var textureCoordAttribute = gl.getAttribLocation(program, "aTextureCoord");
    gl.enableVertexAttribArray(textureCoordAttribute);

    // Fullscreen quad as a triangle strip.
    var verticesBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, verticesBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1.0, 1.0, 0.0, -1.0, 1.0, 0.0, 1.0, -1.0, 0.0, -1.0, -1.0, 0.0]), gl.STATIC_DRAW);
    gl.vertexAttribPointer(vertexPositionAttribute, 3, gl.FLOAT, false, 0, 0);
    var texCoordBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0]), gl.STATIC_DRAW);
    gl.vertexAttribPointer(textureCoordAttribute, 2, gl.FLOAT, false, 0, 0);

    // One texture per plane.
    gl.y = new Texture(gl);
    gl.u = new Texture(gl);
    gl.v = new Texture(gl);
    gl.y.bind(0, program, "YTexture");
    gl.u.bind(1, program, "UTexture");
    gl.v.bind(2, program, "VTexture");
};

/* Draw one packed yuv420p frame.
 * uOffset is the Y-plane byte count, vOffset the U-plane byte count. */
WebGLPlayer.prototype.renderFrame = function (videoFrame, width, height, uOffset, vOffset) {
    if (!this.gl) {
        console.log("[ER] Render frame failed due to WebGL not supported.");
        return;
    }
    var gl = this.gl;
    gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
    gl.clearColor(0.0, 0.0, 0.0, 0.0);
    gl.clear(gl.COLOR_BUFFER_BIT);
    gl.y.fill(width, height, videoFrame.subarray(0, uOffset));
    gl.u.fill(width >> 1, height >> 1, videoFrame.subarray(uOffset, uOffset + vOffset));
    gl.v.fill(width >> 1, height >> 1, videoFrame.subarray(uOffset + vOffset, videoFrame.length));
    gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
};

WebGLPlayer.prototype.fullscreen = function () {
    var canvas = this.canvas;
    // BUG FIX: the original checked canvas.RequestFullScreen (capital R),
    // which no browser defines; the standard API is requestFullscreen.
    if (canvas.requestFullscreen) {
        canvas.requestFullscreen();
    } else if (canvas.webkitRequestFullScreen) {
        canvas.webkitRequestFullScreen();
    } else if (canvas.mozRequestFullScreen) {
        canvas.mozRequestFullScreen();
    } else if (canvas.msRequestFullscreen) {
        canvas.msRequestFullscreen();
    } else {
        alert("This browser doesn't support fullscreen");
    }
};

WebGLPlayer.prototype.exitfullscreen = function () {
    if (document.exitFullscreen) {
        document.exitFullscreen();
    } else if (document.webkitExitFullscreen) {
        document.webkitExitFullscreen();
    } else if (document.mozCancelFullScreen) {
        document.mozCancelFullScreen();
    } else if (document.msExitFullscreen) {
        document.msExitFullscreen();
    } else {
        alert("Exit fullscreen doesn't work");
    }
};
video.js //websocket接收端
/* video.js — websocket client: receives raw H.264/H.265, decodes it in
 * wasm (ffmpeg) and renders the resulting yuv420p frames with WebGL. */
var canvas;
var webglPlayer;
var width = 0;
var height = 0;
var socketurl = "ws://192.168.1.213:8000"; // websocket server ip:port

// codeid = 1: decode a raw H.265 stream
function playH265() { play(1); }
// codeid = 0: decode a raw H.264 stream
function playH264() { play(0); }

function play(codeid) {
    var websocket = new WebSocket(socketurl);
    websocket.binaryType = "arraybuffer"; // receive binary frames as ArrayBuffer

    websocket.onopen = function () {
        // NOTE(review): onopen only fires at readyState OPEN (1), so the
        // readyState == 0 branch is effectively dead — kept for parity.
        if (websocket.readyState == 0) {
            alert("browser connect websocket server failed,please reconnect");
            return;
        } else if (websocket.readyState == 1) {
            console.log("In javascript language connect websocket server success!");
        }
        websocket.send("connect websocket server");
        // Initialise the wasm decoder: 1 = h265, 0 = h264.
        var ret = Module._init_decoder(codeid);
        if (ret != 0) {
            console.log("init decoder failed!");
            return;
        }
    };

    let ptr;
    websocket.onmessage = function (evt) {
        let size = evt.data.byteLength;
        let buff = new Uint8Array(evt.data);   // raw bitstream bytes
        let buf = Module._malloc(size);        // copy into wasm heap
        Module.HEAPU8.set(buff, buf);
        ptr = Module._decoder_raw(buf, size);  // decode in wasm (Imgdata*)
        // BUG FIX: decoder_raw returns NULL (0) while waiting for the first
        // VPS/SPS and on errors; the original read Imgdata fields from
        // heap address 0 and rendered garbage.
        if (ptr) {
            width = Module.HEAPU32[ptr / 4];         // Imgdata.width
            height = Module.HEAPU32[ptr / 4 + 1];    // Imgdata.height
            let imgbuffptr = Module.HEAPU32[ptr / 4 + 2];  // Imgdata.data
            let imgbuff = Module.HEAPU8.subarray(imgbuffptr, imgbuffptr + width * height * 3 / 2);
            showYuv420pdata(width, height, imgbuff);
        }
        if (buf != null) {  // release the input copy
            Module._free(buf);
            buf = null;
        }
    };

    websocket.onclose = function () {
        // Connection closed: shut down the decoder and free its memory.
        Module._free_decoder();
        console.log("In javascript language websocket client close !");
    };

    websocket.onerror = function () {
        alert("websocket 服务器未启动!");
        return;
    };
}

/***************************************************************
 Render one yuv420p frame with WebGL (lazy player creation).
 **************************************************************/
function showYuv420pdata(width, height, data) {
    let ylenght = width * height;       // Y plane byte count
    let uvlenght = width * height / 4;  // U (and V) plane byte count
    if (!webglPlayer) {
        const canvasId = "canvas";
        canvas = document.getElementById(canvasId);
        webglPlayer = new WebGLPlayer(canvas, { preserveDrawingBuffer: false });
    }
    webglPlayer.renderFrame(data, width, height, ylenght, uvlenght);
}

/***************************************************************
 Fullscreen toggle via the WebGL player.
 **************************************************************/
function fullscreen() {
    if (!webglPlayer) {
        alert("没有渲染webgl暂时不能全屏");
        return;
    }
    webglPlayer.fullscreen();
}
七.结果
由于2020年6月份做的,当时忘记截图记录了,没有图片。说下这种方案实现效果,测试结果。
解码1080p 30fps的H265码流, 延迟300~450ms,
解码4k 30fps的H265码流,播放越久延迟越重,不行。适合1080p分辨率以下的。
版权归原作者 国服卤鸡蛋 所有, 如有侵权,请联系我们删除。