-rw-r--r--  libhb/common.c              18
-rw-r--r--  libhb/cropscale.c            4
-rw-r--r--  libhb/decavcodec.c           2
-rw-r--r--  libhb/encavcodec.c           2
-rw-r--r--  libhb/encx264.c              3
-rw-r--r--  libhb/encx265.c              6
-rw-r--r--  libhb/handbrake/common.h     3
-rw-r--r--  libhb/rendersub.c          107
-rw-r--r--  libhb/sync.c                 2
-rw-r--r--  libhb/work.c                52
10 files changed, 181 insertions, 18 deletions
diff --git a/libhb/common.c b/libhb/common.c
index 615b4a8aa..c8536dbb3 100644
--- a/libhb/common.c
+++ b/libhb/common.c
@@ -5961,3 +5961,21 @@ const char * hb_get_color_range_name(int range)
return "mpeg";
}
}
+
+int hb_get_bit_depth(int format)
+{
+ const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(format);
+ int i, min, max;
+
+ if (!desc || !desc->nb_components) {
+ return -1;
+ }
+
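+ // Return the largest per-component depth declared by the pixel format descriptor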
+ min = INT_MAX, max = -INT_MAX;
+ for (i = 0; i < desc->nb_components; i++) {
+ min = FFMIN(desc->comp[i].depth, min);
+ max = FFMAX(desc->comp[i].depth, max);
+ }
+
+ return max;
+}
diff --git a/libhb/cropscale.c b/libhb/cropscale.c
index 4006a271c..34721a18e 100644
--- a/libhb/cropscale.c
+++ b/libhb/cropscale.c
@@ -172,9 +172,7 @@ static int crop_scale_init(hb_filter_object_t * filter, hb_filter_init_t * init)
if (!hb_qsv_hw_filters_are_enabled(init->job))
#endif
{
- // TODO: Support other pix formats
- // Force output to YUV420P for until other formats are supported
- hb_dict_set(avsettings, "pix_fmts", hb_value_string("yuv420p"));
+ hb_dict_set(avsettings, "pix_fmts", hb_value_string(hb_get_format_name(init->pix_fmt)));
hb_dict_set(avfilter, "format", avsettings);
hb_value_array_append(avfilters, avfilter);
}
diff --git a/libhb/decavcodec.c b/libhb/decavcodec.c
index 091a2786d..50d706ec0 100644
--- a/libhb/decavcodec.c
+++ b/libhb/decavcodec.c
@@ -1190,7 +1190,7 @@ int reinit_video_filters(hb_work_private_t * pv)
hb_avfilter_append_dict(filters, "scale", settings);
settings = hb_dict_init();
- hb_dict_set(settings, "pix_fmts", hb_value_string("yuv420p"));
+ hb_dict_set(settings, "pix_fmts", hb_value_string(hb_get_format_name(pix_fmt)));
hb_avfilter_append_dict(filters, "format", settings);
}
}
diff --git a/libhb/encavcodec.c b/libhb/encavcodec.c
index 543ce95f2..2263a7337 100644
--- a/libhb/encavcodec.c
+++ b/libhb/encavcodec.c
@@ -422,7 +422,7 @@ int encavcodecInit( hb_work_object_t * w, hb_job_t * job )
}
context->width = job->width;
context->height = job->height;
- context->pix_fmt = AV_PIX_FMT_YUV420P;
+ context->pix_fmt = job->pix_fmt;
context->sample_aspect_ratio.num = job->par.num;
context->sample_aspect_ratio.den = job->par.den;
diff --git a/libhb/encx264.c b/libhb/encx264.c
index b7d30f9ab..f63150bcc 100644
--- a/libhb/encx264.c
+++ b/libhb/encx264.c
@@ -755,7 +755,8 @@ static hb_buffer_t *x264_encode( hb_work_object_t *w, hb_buffer_t *in )
hb_buffer_t *tmp = NULL;
/* Point x264 at our current buffers Y(UV) data. */
- if (pv->pic_in.img.i_csp & X264_CSP_HIGH_DEPTH)
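+ // Expand the input to 16-bit only when a high-depth encoder is still being fed 8-bit frames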
+ if (pv->pic_in.img.i_csp & X264_CSP_HIGH_DEPTH &&
+ job->pix_fmt == AV_PIX_FMT_YUV420P)
{
tmp = expand_buf(pv->api->bit_depth, in);
pv->pic_in.img.i_stride[0] = tmp->plane[0].stride;
diff --git a/libhb/encx265.c b/libhb/encx265.c
index 3e069bb9b..c7f4f52a9 100644
--- a/libhb/encx265.c
+++ b/libhb/encx265.c
@@ -60,6 +60,7 @@ struct hb_work_private_s
// Multiple bit-depth
const x265_api * api;
+ int bit_depth;
};
static int param_parse(hb_work_private_t *pv, x265_param *param,
@@ -194,6 +195,9 @@ int encx265Init(hb_work_object_t *w, hb_job_t *job)
goto fail;
}
+ /* Bit depth */
+ pv->bit_depth = hb_get_bit_depth(job->pix_fmt);
+
/* iterate through x265_opts and parse the options */
hb_dict_t *x265_opts;
x265_opts = hb_encopts_to_dict(job->encoder_options, job->vcodec);
@@ -477,7 +481,7 @@ static hb_buffer_t* x265_encode(hb_work_object_t *w, hb_buffer_t *in)
pic_in.planes[2] = in->plane[2].data;
pic_in.poc = pv->frames_in++;
pic_in.pts = in->s.start;
- pic_in.bitDepth = 8;
+ pic_in.bitDepth = pv->bit_depth;
if (in->s.new_chap && job->chapter_markers)
{
diff --git a/libhb/handbrake/common.h b/libhb/handbrake/common.h
index fb2123a7c..282a8a790 100644
--- a/libhb/handbrake/common.h
+++ b/libhb/handbrake/common.h
@@ -1449,6 +1449,9 @@ int hb_output_color_prim(hb_job_t * job);
int hb_output_color_transfer(hb_job_t * job);
int hb_output_color_matrix(hb_job_t * job);
+const char * hb_get_format_name(int format);
+int hb_get_bit_depth(int format);
+
#define HB_NEG_FLOAT_REG "(([-])?(([0-9]+([.,][0-9]+)?)|([.,][0-9]+))"
#define HB_FLOAT_REG "(([0-9]+([.,][0-9]+)?)|([.,][0-9]+))"
#define HB_NEG_INT_REG "(([-]?[0-9]+)"
diff --git a/libhb/rendersub.c b/libhb/rendersub.c
index 4b0fae6d1..8d9f7f2c1 100644
--- a/libhb/rendersub.c
+++ b/libhb/rendersub.c
@@ -188,13 +188,112 @@ static void blend( hb_buffer_t *dst, hb_buffer_t *src, int left, int top )
}
}
-// applies subtitle 'sub' YUVA420P buffer into destination 'buf'
-// 'buf' is currently YUV420P, but in future will be other formats as well
+static void blend8on1x( hb_buffer_t *dst, hb_buffer_t *src, int left, int top, int shift )
+{
+ int xx, yy;
+ int ww, hh;
+ int x0, y0;
+ int max;
+
+ uint8_t *y_in;
+ uint8_t *u_in;
+ uint8_t *v_in;
+ uint8_t *a_in;
+
+ uint16_t *y_out;
+ uint16_t *u_out;
+ uint16_t *v_out;
+ uint16_t alpha;
+
+ x0 = y0 = 0;
+ if( left < 0 )
+ {
+ x0 = -left;
+ }
+ if( top < 0 )
+ {
+ y0 = -top;
+ }
+
+ ww = src->f.width;
+ if( src->f.width - x0 > dst->f.width - left )
+ {
+ ww = dst->f.width - left + x0;
+ }
+ hh = src->f.height;
+ if( src->f.height - y0 > dst->f.height - top )
+ {
+ hh = dst->f.height - top + y0;
+ }
+
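+ // Maximum sample value at the output bit depth (e.g. 1023 when shift is 2)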
+ max = (256 << shift) -1;
+
+ // Blend luma
+ for( yy = y0; yy < hh; yy++ )
+ {
+ y_in = src->plane[0].data + yy * src->plane[0].stride;
+ y_out = (uint16_t*)(dst->plane[0].data + ( yy + top ) * dst->plane[0].stride);
+ a_in = src->plane[3].data + yy * src->plane[3].stride;
+ for( xx = x0; xx < ww; xx++ )
+ {
+ alpha = a_in[xx] << shift;
+ /*
+ * Merge the luminance and alpha with the picture
+ */
+ y_out[left + xx] =
+ ( (uint32_t)y_out[left + xx] * ( max - alpha ) +
+ ((uint32_t)y_in[xx] << shift) * alpha ) / max;
+ }
+ }
+
+ // Blend U & V
+ int hshift = 0;
+ int wshift = 0;
+ if( dst->plane[1].height < dst->plane[0].height )
+ hshift = 1;
+ if( dst->plane[1].width < dst->plane[0].width )
+ wshift = 1;
+
+ for( yy = y0 >> hshift; yy < hh >> hshift; yy++ )
+ {
+ u_in = src->plane[1].data + yy * src->plane[1].stride;
+ u_out = (uint16_t*)(dst->plane[1].data + ( yy + ( top >> hshift ) ) * dst->plane[1].stride);
+ v_in = src->plane[2].data + yy * src->plane[2].stride;
+ v_out = (uint16_t*)(dst->plane[2].data + ( yy + ( top >> hshift ) ) * dst->plane[2].stride);
+ a_in = src->plane[3].data + ( yy << hshift ) * src->plane[3].stride;
+
+ for( xx = x0 >> wshift; xx < ww >> wshift; xx++ )
+ {
+ alpha = a_in[xx << wshift] << shift;
+
+ // Blend U and alpha
+ u_out[(left >> wshift) + xx] =
+ ( (uint32_t)u_out[(left >> wshift) + xx] * ( max - alpha ) +
+ ((uint32_t)u_in[xx] << shift) * alpha ) / max;
+
+ // Blend V and alpha
+ v_out[(left >> wshift) + xx] =
+ ( (uint32_t)v_out[(left >> wshift) + xx] * ( max - alpha ) +
+ ((uint32_t)v_in[xx] << shift) * alpha ) / max;
+ }
+ }
+}
+
// Assumes that the input destination buffer has the same dimensions
// as the original title dimensions
static void ApplySub( hb_filter_private_t * pv, hb_buffer_t * buf, hb_buffer_t * sub )
{
- blend( buf, sub, sub->f.x, sub->f.y );
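+ // Subtitles are decoded to 8-bit YUVA420P; widen them to the output bit depth while blending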
+ switch (pv->output.pix_fmt) {
+ case AV_PIX_FMT_YUV420P10:
+ blend8on1x(buf, sub, sub->f.x, sub->f.y, 2);
+ break;
+ case AV_PIX_FMT_YUV420P12:
+ blend8on1x(buf, sub, sub->f.x, sub->f.y, 4);
+ break;
+ default:
+ blend(buf, sub, sub->f.x, sub->f.y);
+ break;
+ }
}
static hb_buffer_t * ScaleSubtitle(hb_filter_private_t *pv,
@@ -604,7 +703,7 @@ static int ssa_post_init( hb_filter_object_t * filter, hb_job_t * job )
ass_set_frame_size( pv->renderer, width, height);
double par = (double)job->par.num / job->par.den;
- ass_set_aspect_ratio( pv->renderer, 1, par );
+ ass_set_pixel_aspect( pv->renderer, par );
return 0;
}
diff --git a/libhb/sync.c b/libhb/sync.c
index ef75ded1e..b68d3ae73 100644
--- a/libhb/sync.c
+++ b/libhb/sync.c
@@ -371,7 +371,7 @@ static hb_buffer_t * CreateBlackBuf( sync_stream_t * stream,
{
if (buf == NULL)
{
- buf = hb_frame_buffer_init(AV_PIX_FMT_YUV420P,
+ buf = hb_frame_buffer_init(stream->common->job->pix_fmt,
stream->common->job->title->geometry.width,
stream->common->job->title->geometry.height);
memset(buf->plane[0].data, 0x00, buf->plane[0].size);
diff --git a/libhb/work.c b/libhb/work.c
index cae0dcbbf..9567d195f 100644
--- a/libhb/work.c
+++ b/libhb/work.c
@@ -433,7 +433,7 @@ void hb_display_job_info(hb_job_t *job)
else
#endif
{
- hb_log(" + decoder: %s", title->video_codec_name);
+ hb_log(" + decoder: %s %d-bit", title->video_codec_name, hb_get_bit_depth(job->pix_fmt));
}
if( title->video_bitrate )
@@ -773,6 +773,49 @@ void correct_framerate( hb_interjob_t * interjob, hb_job_t * job )
}
}
+static int bit_depth_is_supported(hb_job_t * job, int bit_depth)
+{
+ for (int i = 0; i < hb_list_count(job->list_filter); i++)
+ {
+ hb_filter_object_t *filter = hb_list_item(job->list_filter, i);
+
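+ // Filters that do not yet support bit depths above 8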
+ switch (filter->id) {
+ case HB_FILTER_DETELECINE:
+ case HB_FILTER_COMB_DETECT:
+ case HB_FILTER_DECOMB:
+ case HB_FILTER_DENOISE:
+ case HB_FILTER_NLMEANS:
+ case HB_FILTER_CHROMA_SMOOTH:
+ case HB_FILTER_LAPSHARP:
+ case HB_FILTER_UNSHARP:
+ case HB_FILTER_GRAYSCALE:
+ return 0;
+ }
+ }
+
+ if (hb_video_encoder_get_depth(job->vcodec) < bit_depth)
+ {
+ return 0;
+ }
+
+ return 1;
+}
+
+static int get_best_pix_fmt(hb_job_t * job)
+{
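+ // Prefer the source bit depth, falling back to 8-bit when the filter chain or encoder cannot handle more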
+ int bit_depth = hb_get_bit_depth(job->title->pix_fmt);
+
+ if (bit_depth >= 12 && bit_depth_is_supported(job, 12))
+ {
+ return AV_PIX_FMT_YUV420P12;
+ }
+ if (bit_depth >= 10 && bit_depth_is_supported(job, 10))
+ {
+ return AV_PIX_FMT_YUV420P10;
+ }
+ return AV_PIX_FMT_YUV420P;
+}
+
static void analyze_subtitle_scan( hb_job_t * job )
{
hb_subtitle_t *subtitle;
@@ -1383,11 +1426,7 @@ static void do_job(hb_job_t *job)
init.time_base.num = 1;
init.time_base.den = 90000;
init.job = job;
- // TODO: When more complete pix format support is complete this
- // needs to be updated to reflect the pix_fmt output by
- // decavcodec.c. This may be different than title->pix_fmt
- // since we will likely only support planar YUV color formats.
- init.pix_fmt = AV_PIX_FMT_YUV420P;
+ init.pix_fmt = get_best_pix_fmt(job);
init.color_range = AVCOL_RANGE_MPEG;
init.color_prim = title->color_prim;
@@ -1416,6 +1455,7 @@ static void do_job(hb_job_t *job)
}
i++;
}
+ job->pix_fmt = init.pix_fmt;
job->width = init.geometry.width;
job->height = init.geometry.height;
job->par = init.geometry.par;