References:
/boonya/article/details/79753091
/u010029439/article/details/84754395
First of all, thanks to the two original authors!
You can refer to the FFmpeg source code: doc\examples\filtering_video.c is an example of video filtering, and libavfilter\vf_eq.c implements the eq filter, which adjusts brightness and contrast to improve image quality.
Reference command line:
ffmpeg.exe -i test.mp4 -vf eq=contrast=1:brightness=0.1 out.mp4
The eq filter sets brightness, contrast, saturation and approximate gamma adjustment.
The filter supports the following options (an example command combining them follows the list):
contrast
Sets the contrast expression. The value must be a float in the range -2.0 to 2.0. The default value is 0.
brightness
Sets the brightness expression. The value must be a float in the range -1.0 to 1.0. The default value is 0.
saturation
Sets the saturation expression. The value must be a float in the range 0.0 to 3.0. The default value is 1.
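For example, to apply a mild contrast and brightness boost together with a slight saturation increase in one pass (the values below are only illustrative; test.mp4 and out.mp4 are the same placeholder names as above):
ffmpeg.exe -i test.mp4 -vf eq=contrast=1.2:brightness=0.05:saturation=1.3 out.mp4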
Below is the C# wrapper class I use around this filter graph (built on the FFmpeg.AutoGen-style bindings):
using System;
using FFmpeg.AutoGen; // assumes the FFmpeg.AutoGen C# bindings

public unsafe class VideoFilter
{
    #region class member variables
    AVFilterGraph* m_filter_graph = null;
    AVFilterContext* m_buffersink_ctx = null;
    AVFilterContext* m_buffersrc_ctx = null;
    AVFrame* m_filt_frame = null;
    object m_lock_record = new object();
    #endregion
    public int Init(int width, int height, int contrast, int brightness)
    {
        lock (m_lock_record)
        {
            // Already initialized; call Reset() to rebuild the graph instead.
            if (m_filter_graph != null)
                return -1;

            // Clamp the UI-style 1..9 values (5 = neutral).
            contrast = contrast < 1 ? 1 : contrast;
            contrast = contrast > 9 ? 9 : contrast;
            brightness = brightness < 1 ? 1 : brightness;
            brightness = brightness > 9 ? 9 : brightness;

            // Map 1..9 to the eq filter's ranges:
            // contrast_f in [0.6, 1.4] (1.0 = unchanged), brightness_f in [-0.4, 0.4] (0.0 = unchanged).
            float contrast_f = 1 + ((float)(contrast - 5)) / 10;
            float brightness_f = 0 + ((float)(brightness - 5)) / 10;

            // Note: on locales that use ',' as the decimal separator, consider ToString(CultureInfo.InvariantCulture).
            string filters_descr = "eq=contrast=" + contrast_f.ToString() + ":brightness=" + brightness_f.ToString();
            return init_filters(width, height, filters_descr);
        }
    }
    public int Reset(int width, int height, int contrast, int brightness)
    {
        Deinit();
        return Init(width, height, contrast, brightness);
    }
    public int Filter(AVFrame* frame_src, AVFrame** frame_dst)
    {
        lock (m_lock_record)
        {
            // Fall back to the unfiltered source frame if anything goes wrong.
            *frame_dst = frame_src;
            if (m_filter_graph == null)
            {
                return -1;
            }
            int ret;
            // Push the decoded frame into the buffer source.
            // 8 == AV_BUFFERSRC_FLAG_KEEP_REF
            ret = ffmpeg.av_buffersrc_add_frame_flags(m_buffersrc_ctx, frame_src, 8);
            if (ret < 0)
                return ret;
            // Pull the filtered frame from the buffer sink.
            ret = ffmpeg.av_buffersink_get_frame(m_buffersink_ctx, m_filt_frame);
            if (ret < 0)
                return ret;
            *frame_dst = m_filt_frame;
            return 0;
        }
    }
    public void UnrefFrame()
    {
        lock (m_lock_record)
        {
            if (m_filter_graph == null)
                return;
            // Release the filtered frame returned by the last successful Filter() call.
            ffmpeg.av_frame_unref(m_filt_frame);
        }
    }
    public void Deinit()
    {
        if (m_filter_graph == null)
            return;
        fixed (AVFilterGraph** filter_graph = &m_filter_graph)
            ffmpeg.avfilter_graph_free(filter_graph);
        if (m_filt_frame != null)
        {
            fixed (AVFrame** filt_frame = &m_filt_frame)
                ffmpeg.av_frame_free(filt_frame);
        }
    }
    private int init_filters(int width, int height, string filters_descr)
    {
        int ret = 0;
        ffmpeg.avfilter_register_all(); // needed for FFmpeg < 4.0; deprecated in newer versions

        // Input frames are assumed to be YUV420P (AVPixelFormat.AV_PIX_FMT_YUV420P == 0);
        // the time base is hard-coded to 1/20 here.
        string args = "video_size=" + width.ToString() + "x" + height.ToString() + ":pix_fmt=0:time_base=1/20";

        AVFilter* buffersrc = ffmpeg.avfilter_get_by_name("buffer");
        AVFilter* buffersink = ffmpeg.avfilter_get_by_name("buffersink");
        AVFilterInOut* outputs = ffmpeg.avfilter_inout_alloc();
        AVFilterInOut* inputs = ffmpeg.avfilter_inout_alloc();

        // Terminated pixel format list, equivalent to the C example's
        // AVPixelFormat pix_fmts[] = { AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE };
        int* pix_fmts = (int*)ffmpeg.av_malloc(8); // room for two 4-byte entries
        pix_fmts[0] = (int)AVPixelFormat.AV_PIX_FMT_YUV420P;
        pix_fmts[1] = (int)AVPixelFormat.AV_PIX_FMT_NONE;

        m_filter_graph = ffmpeg.avfilter_graph_alloc();
        if (outputs == null || inputs == null || m_filter_graph == null)
        {
            ret = -1;
            goto end;
        }

        // Create the buffer source: decoded frames are pushed in here.
        fixed (AVFilterContext** buffersrc_ctx = &m_buffersrc_ctx)
        {
            ret = ffmpeg.avfilter_graph_create_filter(buffersrc_ctx, buffersrc, "in", args, null, m_filter_graph);
            if (ret < 0)
            {
                goto end;
            }
        }

        // Create the buffer sink: filtered frames are pulled out here.
        fixed (AVFilterContext** buffersink_ctx = &m_buffersink_ctx)
        {
            ret = ffmpeg.avfilter_graph_create_filter(buffersink_ctx, buffersink, "out", null, null, m_filter_graph);
            if (ret < 0)
            {
                goto end;
            }
        }

        // With an element size of 1, this returns the list length in bytes (4 here),
        // which is the size av_opt_set_bin expects.
        int size = (int)ffmpeg.av_int_list_length_for_size(1, (void*)pix_fmts, unchecked((ulong)AVPixelFormat.AV_PIX_FMT_NONE));
        ret = ffmpeg.av_opt_set_bin(m_buffersink_ctx, "pix_fmts", (byte*)pix_fmts, size, ffmpeg.AV_OPT_SEARCH_CHILDREN);
        if (ret < 0)
            goto end;

        // Endpoints of the filter chain described by filters_descr.
        outputs->name = ffmpeg.av_strdup("in");
        outputs->filter_ctx = m_buffersrc_ctx;
        outputs->pad_idx = 0;
        outputs->next = null;

        inputs->name = ffmpeg.av_strdup("out");
        inputs->filter_ctx = m_buffersink_ctx;
        inputs->pad_idx = 0;
        inputs->next = null;

        ret = ffmpeg.avfilter_graph_parse_ptr(m_filter_graph, filters_descr, &inputs, &outputs, null);
        if (ret < 0)
            goto end;

        ret = ffmpeg.avfilter_graph_config(m_filter_graph, null);
        if (ret < 0)
            goto end;

        m_filt_frame = ffmpeg.av_frame_alloc();

    end:
        ffmpeg.avfilter_inout_free(&inputs);
        ffmpeg.avfilter_inout_free(&outputs);
        ffmpeg.av_free(pix_fmts);
        if (ret < 0)
            Deinit();
        return ret;
    }
}
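For reference, here is a minimal sketch of how the class above might be driven from a decode loop. It assumes an unsafe context and the FFmpeg.AutoGen bindings; DecodeNextFrame() and Render() are hypothetical stand-ins for your own decoding and display/encoding code, and the width/height/contrast/brightness values are just examples:

// Minimal usage sketch, not a drop-in implementation.
unsafe void RunFilterLoop()
{
    var filter = new VideoFilter();

    // Frame size of the decoded video; contrast/brightness use the 1..9 scale expected by Init() (5 = neutral).
    if (filter.Init(1920, 1080, contrast: 7, brightness: 6) < 0)
        return;

    AVFrame* src;
    while ((src = DecodeNextFrame()) != null) // hypothetical: next decoded YUV420P frame, or null at end of stream
    {
        AVFrame* dst = null;
        int ret = filter.Filter(src, &dst);   // on failure dst still points at the unfiltered source frame

        Render(dst);                          // hypothetical: display or re-encode the (possibly filtered) frame

        if (ret == 0)
            filter.UnrefFrame();              // release the internal filtered frame before the next iteration
    }

    filter.Deinit();
}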
I have verified this and the effect is obvious; you can tweak the parameters to suit your own needs.