/*
 * Copyright (c) 2018 Paul B Mahol
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include "libavutil/imgutils.h"
#include "libavutil/mem.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "avfilter.h"
#include "filters.h"
#include "video.h"
30 typedef struct AmplifyContext
{
32 const AVPixFmtDescriptor
*desc
;
52 static const enum AVPixelFormat pixel_fmts
[] = {
53 AV_PIX_FMT_GRAY8
, AV_PIX_FMT_GRAY9
,
54 AV_PIX_FMT_GRAY10
, AV_PIX_FMT_GRAY12
, AV_PIX_FMT_GRAY14
,
55 AV_PIX_FMT_GRAY16
, AV_PIX_FMT_GRAYF32
,
56 AV_PIX_FMT_YUV410P
, AV_PIX_FMT_YUV411P
,
57 AV_PIX_FMT_YUV420P
, AV_PIX_FMT_YUV422P
,
58 AV_PIX_FMT_YUV440P
, AV_PIX_FMT_YUV444P
,
59 AV_PIX_FMT_YUVJ420P
, AV_PIX_FMT_YUVJ422P
,
60 AV_PIX_FMT_YUVJ440P
, AV_PIX_FMT_YUVJ444P
,
62 AV_PIX_FMT_YUV420P9
, AV_PIX_FMT_YUV422P9
, AV_PIX_FMT_YUV444P9
,
63 AV_PIX_FMT_YUV420P10
, AV_PIX_FMT_YUV422P10
, AV_PIX_FMT_YUV444P10
,
65 AV_PIX_FMT_YUV444P12
, AV_PIX_FMT_YUV422P12
, AV_PIX_FMT_YUV420P12
,
67 AV_PIX_FMT_YUV444P14
, AV_PIX_FMT_YUV422P14
, AV_PIX_FMT_YUV420P14
,
68 AV_PIX_FMT_YUV420P16
, AV_PIX_FMT_YUV422P16
, AV_PIX_FMT_YUV444P16
,
69 AV_PIX_FMT_GBRP
, AV_PIX_FMT_GBRP9
, AV_PIX_FMT_GBRP10
,
70 AV_PIX_FMT_GBRP12
, AV_PIX_FMT_GBRP14
, AV_PIX_FMT_GBRP16
,
71 AV_PIX_FMT_YUVA420P
, AV_PIX_FMT_YUVA422P
, AV_PIX_FMT_YUVA444P
,
72 AV_PIX_FMT_YUVA444P9
, AV_PIX_FMT_YUVA444P10
, AV_PIX_FMT_YUVA444P12
, AV_PIX_FMT_YUVA444P16
,
73 AV_PIX_FMT_YUVA422P9
, AV_PIX_FMT_YUVA422P10
, AV_PIX_FMT_YUVA422P12
, AV_PIX_FMT_YUVA422P16
,
74 AV_PIX_FMT_YUVA420P9
, AV_PIX_FMT_YUVA420P10
, AV_PIX_FMT_YUVA420P16
,
75 AV_PIX_FMT_GBRAP
, AV_PIX_FMT_GBRAP10
, AV_PIX_FMT_GBRAP12
, AV_PIX_FMT_GBRAP16
,
76 AV_PIX_FMT_GBRPF32
, AV_PIX_FMT_GBRAPF32
,
80 static av_cold
int init(AVFilterContext
*ctx
)
82 AmplifyContext
*s
= ctx
->priv
;
84 s
->nb_inputs
= s
->radius
* 2 + 1;
86 s
->frames
= av_calloc(s
->nb_inputs
, sizeof(*s
->frames
));
88 return AVERROR(ENOMEM
);
93 typedef struct ThreadData
{
/*
 * Core slice kernel, instantiated per sample type:
 *   type  - storage type of a sample (uint8_t / uint16_t / float)
 *   stype - accumulator type wide enough for nb_inputs samples
 *   clip  - final clamping macro for the output depth
 *
 * For each sample of each selected plane, the temporal average over the
 * window is computed; if the center sample deviates from that average by
 * more than `tolerance` but less than `threshold`, the deviation is
 * amplified by `factor`, clamped to llimit/hlimit, and added back.
 * Unselected planes are copied from the center frame unchanged.
 */
#define AMPLIFY_SLICE(type, stype, clip)                                        \
    const stype limit[2] = { s->llimit, s->hlimit };                            \
                                                                                \
    for (int p = 0; p < s->nb_planes; p++) {                                    \
        const int slice_start = (s->height[p] * jobnr) / nb_jobs;               \
        const int slice_end = (s->height[p] * (jobnr+1)) / nb_jobs;             \
        type *dst = (type *)(out->data[p] + slice_start * out->linesize[p]);    \
        ptrdiff_t dst_linesize = out->linesize[p] / sizeof(type);               \
                                                                                \
        if (!((1 << p) & s->planes)) {                                          \
            /* plane not selected: pass the center frame through */             \
            av_image_copy_plane((uint8_t *)dst, out->linesize[p],               \
                                in[radius]->data[p] + slice_start * in[radius]->linesize[p], \
                                in[radius]->linesize[p],                        \
                                s->linesize[p], slice_end - slice_start);       \
            continue;                                                           \
        }                                                                       \
                                                                                \
        for (int y = slice_start; y < slice_end; y++) {                         \
            for (int x = 0; x < s->linesize[p] / sizeof(type); x++) {           \
                stype src = *(type *)(in[radius]->data[p] + y * in[radius]->linesize[p] + x * sizeof(type));\
                float diff, abs_diff, avg;                                      \
                stype sum = 0;                                                  \
                                                                                \
                for (int i = 0; i < nb_inputs; i++) {                           \
                    sum += *(type *)(in[i]->data[p] + y * in[i]->linesize[p] + x * sizeof(type));\
                }                                                               \
                                                                                \
                avg = sum * scale;                                              \
                diff = src - avg;                                               \
                abs_diff = fabsf(diff);                                         \
                                                                                \
                if (abs_diff < threshold && abs_diff > tolerance) {             \
                    float amp = copysignf(fminf(abs_diff * factor, limit[diff >= 0]), diff); \
                    dst[x] = clip(src + amp, depth);                            \
                } else {                                                        \
                    dst[x] = clip(src, depth);                                  \
                }                                                               \
            }                                                                   \
                                                                                \
            dst += dst_linesize;                                                \
        }                                                                       \
    }

/* Output clamping per storage class; `depth` is unused for 8-bit and float. */
#define CLIP8(x, depth) av_clip_uint8(lrintf(x))
#define CLIP16(x, depth) av_clip_uintp2_c(lrintf(x), depth)
#define NOP(x, depth) (x)
144 static int amplify_frame(AVFilterContext
*ctx
, void *arg
, int jobnr
, int nb_jobs
)
146 AmplifyContext
*s
= ctx
->priv
;
147 ThreadData
*td
= arg
;
148 AVFrame
**in
= td
->in
;
149 AVFrame
*out
= td
->out
;
150 const int radius
= s
->radius
;
151 const int nb_inputs
= s
->nb_inputs
;
152 const float threshold
= s
->threshold
;
153 const float tolerance
= s
->tolerance
;
154 const float scale
= 1.f
/ nb_inputs
;
155 const float factor
= s
->factor
;
156 const int depth
= s
->depth
;
159 AMPLIFY_SLICE(uint8_t, int, CLIP8
)
160 } else if (s
->depth
<= 16) {
161 AMPLIFY_SLICE(uint16_t, int, CLIP16
)
163 AMPLIFY_SLICE(float, float, NOP
)
169 static int config_output(AVFilterLink
*outlink
)
171 AVFilterContext
*ctx
= outlink
->src
;
172 AmplifyContext
*s
= ctx
->priv
;
173 AVFilterLink
*inlink
= ctx
->inputs
[0];
176 s
->desc
= av_pix_fmt_desc_get(outlink
->format
);
179 s
->nb_planes
= av_pix_fmt_count_planes(outlink
->format
);
180 s
->depth
= s
->desc
->comp
[0].depth
;
182 if ((ret
= av_image_fill_linesizes(s
->linesize
, inlink
->format
, inlink
->w
)) < 0)
185 s
->height
[1] = s
->height
[2] = AV_CEIL_RSHIFT(inlink
->h
, s
->desc
->log2_chroma_h
);
186 s
->height
[0] = s
->height
[3] = inlink
->h
;
191 static av_cold
void uninit(AVFilterContext
*ctx
)
193 AmplifyContext
*s
= ctx
->priv
;
197 for (i
= 0; i
< s
->nb_frames
; i
++)
198 av_frame_free(&s
->frames
[i
]);
200 av_freep(&s
->frames
);
203 static int filter_frame(AVFilterLink
*inlink
, AVFrame
*in
)
205 AVFilterContext
*ctx
= inlink
->dst
;
206 AVFilterLink
*outlink
= ctx
->outputs
[0];
207 AmplifyContext
*s
= ctx
->priv
;
211 if (s
->nb_frames
< s
->nb_inputs
) {
212 s
->frames
[s
->nb_frames
] = in
;
216 av_frame_free(&s
->frames
[0]);
217 memmove(&s
->frames
[0], &s
->frames
[1], sizeof(*s
->frames
) * (s
->nb_inputs
- 1));
218 s
->frames
[s
->nb_inputs
- 1] = in
;
221 if (!ctx
->is_disabled
) {
222 out
= ff_get_video_buffer(outlink
, outlink
->w
, outlink
->h
);
224 return AVERROR(ENOMEM
);
225 av_frame_copy_props(out
, s
->frames
[0]);
229 ff_filter_execute(ctx
, amplify_frame
, &td
, NULL
,
230 FFMIN(s
->height
[1], ff_filter_get_nb_threads(ctx
)));
232 out
= av_frame_clone(s
->frames
[s
->radius
]);
234 return AVERROR(ENOMEM
);
235 out
->pts
= s
->frames
[0]->pts
;
238 return ff_filter_frame(outlink
, out
);
241 #define OFFSET(x) offsetof(AmplifyContext, x)
242 #define FLAGS AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_FILTERING_PARAM
243 #define VFT AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_RUNTIME_PARAM
245 static const AVOption amplify_options
[] = {
246 { "radius", "set radius", OFFSET(radius
), AV_OPT_TYPE_INT
, {.i64
=2}, 1, 63, .flags
= FLAGS
},
247 { "factor", "set factor", OFFSET(factor
), AV_OPT_TYPE_FLOAT
, {.dbl
=2}, 0, UINT16_MAX
, .flags
= VFT
},
248 { "threshold", "set threshold", OFFSET(threshold
), AV_OPT_TYPE_FLOAT
, {.dbl
=10}, 0, UINT16_MAX
, .flags
= VFT
},
249 { "tolerance", "set tolerance", OFFSET(tolerance
), AV_OPT_TYPE_FLOAT
, {.dbl
=0}, 0, UINT16_MAX
, .flags
= VFT
},
250 { "low", "set low limit for amplification", OFFSET(llimit
), AV_OPT_TYPE_FLOAT
, {.dbl
=UINT16_MAX
}, 0, UINT16_MAX
, .flags
= VFT
},
251 { "high", "set high limit for amplification", OFFSET(hlimit
), AV_OPT_TYPE_FLOAT
, {.dbl
=UINT16_MAX
}, 0, UINT16_MAX
, .flags
= VFT
},
252 { "planes", "set what planes to filter", OFFSET(planes
), AV_OPT_TYPE_FLAGS
, {.i64
=7}, 0, 15, VFT
},
256 static const AVFilterPad inputs
[] = {
259 .type
= AVMEDIA_TYPE_VIDEO
,
260 .filter_frame
= filter_frame
,
264 static const AVFilterPad outputs
[] = {
267 .type
= AVMEDIA_TYPE_VIDEO
,
268 .config_props
= config_output
,
272 AVFILTER_DEFINE_CLASS(amplify
);
274 const FFFilter ff_vf_amplify
= {
276 .p
.description
= NULL_IF_CONFIG_SMALL("Amplify changes between successive video frames."),
277 .p
.priv_class
= &lify_class
,
278 .p
.flags
= AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL
| AVFILTER_FLAG_SLICE_THREADS
,
279 .priv_size
= sizeof(AmplifyContext
),
280 FILTER_OUTPUTS(outputs
),
281 FILTER_INPUTS(inputs
),
282 FILTER_PIXFMTS_ARRAY(pixel_fmts
),
285 .process_command
= ff_filter_process_command
,