Diffstat (limited to 'src/gallium/state_trackers')
-rw-r--r--   src/gallium/state_trackers/vdpau/decode.c            |  53
-rw-r--r--   src/gallium/state_trackers/vdpau/vdpau_private.h     |   2
-rw-r--r--   src/gallium/state_trackers/xorg/xvmc/surface.c       | 115
-rw-r--r--   src/gallium/state_trackers/xorg/xvmc/xvmc_private.h  |   6
4 files changed, 108 insertions, 68 deletions
diff --git a/src/gallium/state_trackers/vdpau/decode.c b/src/gallium/state_trackers/vdpau/decode.c
index 96542f874d9..3bf05bea21f 100644
--- a/src/gallium/state_trackers/vdpau/decode.c
+++ b/src/gallium/state_trackers/vdpau/decode.c
@@ -107,7 +107,7 @@ error_buffer:
 
    for (i = 0; i < VL_NUM_DECODE_BUFFERS; ++i)
       if (vldecoder->buffer[i])
-         vldecoder->buffer[i]->destroy(vldecoder->buffer[i]);
+         vldecoder->decoder->destroy_buffer(vldecoder->decoder, vldecoder->buffer[i]);
 
    vldecoder->decoder->destroy(vldecoder->decoder);
 
@@ -130,7 +130,7 @@ vlVdpDecoderDestroy(VdpDecoder decoder)
 
    for (i = 0; i < VL_NUM_DECODE_BUFFERS; ++i)
       if (vldecoder->buffer[i])
-         vldecoder->buffer[i]->destroy(vldecoder->buffer[i]);
+         vldecoder->decoder->destroy_buffer(vldecoder->decoder, vldecoder->buffer[i]);
 
    vldecoder->decoder->destroy(vldecoder->decoder);
 
@@ -162,8 +162,6 @@ vlVdpDecoderGetParameters(VdpDecoder decoder,
 
 static VdpStatus
 vlVdpDecoderRenderMpeg12(struct pipe_video_decoder *decoder,
-                         struct pipe_video_decode_buffer *buffer,
-                         struct pipe_video_buffer *target,
                          VdpPictureInfoMPEG1Or2 *picture_info,
                          uint32_t bitstream_buffer_count,
                          VdpBitstreamBuffer const *bitstream_buffers)
@@ -176,23 +174,25 @@ vlVdpDecoderRenderMpeg12(struct pipe_video_decoder *decoder,
 
    VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Decoding MPEG2\n");
 
+   i = 0;
+
    /* if surfaces equals VDP_STATUS_INVALID_HANDLE, they are not used */
-   if (picture_info->forward_reference == VDP_INVALID_HANDLE)
-      ref_frames[0] = NULL;
-   else {
-      ref_frames[0] = ((vlVdpSurface *)vlGetDataHTAB(picture_info->forward_reference))->video_buffer;
-      if (!ref_frames[0])
+   if (picture_info->forward_reference != VDP_INVALID_HANDLE) {
+      ref_frames[i] = ((vlVdpSurface *)vlGetDataHTAB(picture_info->forward_reference))->video_buffer;
+      if (!ref_frames[i])
         return VDP_STATUS_INVALID_HANDLE;
+      ++i;
    }
 
-   if (picture_info->backward_reference == VDP_INVALID_HANDLE)
-      ref_frames[1] = NULL;
-   else {
-      ref_frames[1] = ((vlVdpSurface *)vlGetDataHTAB(picture_info->backward_reference))->video_buffer;
-      if (!ref_frames[1])
+   if (picture_info->backward_reference != VDP_INVALID_HANDLE) {
+      ref_frames[i] = ((vlVdpSurface *)vlGetDataHTAB(picture_info->backward_reference))->video_buffer;
+      if (!ref_frames[i])
         return VDP_STATUS_INVALID_HANDLE;
+      ++i;
    }
 
+   decoder->set_reference_frames(decoder, ref_frames, i);
+
    memset(&picture, 0, sizeof(picture));
    picture.base.profile = decoder->profile;
    picture.picture_coding_type = picture_info->picture_coding_type;
@@ -207,19 +207,19 @@
    picture.f_code[1][0] = picture_info->f_code[1][0] - 1;
    picture.f_code[1][1] = picture_info->f_code[1][1] - 1;
 
-   buffer->begin_frame(buffer);
+   decoder->set_picture_parameters(decoder, &picture.base);
 
    memcpy(intra_quantizer_matrix, picture_info->intra_quantizer_matrix, sizeof(intra_quantizer_matrix));
    intra_quantizer_matrix[0] = 1 << (7 - picture_info->intra_dc_precision);
-   buffer->set_quant_matrix(buffer, intra_quantizer_matrix, picture_info->non_intra_quantizer_matrix);
+   decoder->set_quant_matrix(decoder, intra_quantizer_matrix, picture_info->non_intra_quantizer_matrix);
 
-   for (i = 0; i < bitstream_buffer_count; ++i)
-      buffer->decode_bitstream(buffer, bitstream_buffers[i].bitstream_bytes,
-                               bitstream_buffers[i].bitstream, &picture.base, num_ycbcr_blocks);
+   decoder->begin_frame(decoder);
 
-   buffer->end_frame(buffer);
+   for (i = 0; i < bitstream_buffer_count; ++i)
+      decoder->decode_bitstream(decoder, bitstream_buffers[i].bitstream_bytes,
+                                bitstream_buffers[i].bitstream, num_ycbcr_blocks);
 
-   decoder->flush_buffer(buffer, num_ycbcr_blocks, ref_frames, target);
+   decoder->end_frame(decoder, num_ycbcr_blocks);
 
    return VDP_STATUS_OK;
 }
@@ -261,11 +261,12 @@ vlVdpDecoderRender(VdpDecoder decoder,
    case PIPE_VIDEO_PROFILE_MPEG2_MAIN:
       ++vldecoder->cur_buffer;
       vldecoder->cur_buffer %= VL_NUM_DECODE_BUFFERS;
-      return vlVdpDecoderRenderMpeg12(vldecoder->decoder,
-                                      vldecoder->buffer[vldecoder->cur_buffer],
-                                      vlsurf->video_buffer,
-                                      (VdpPictureInfoMPEG1Or2 *)picture_info,
-                                      bitstream_buffer_count,bitstream_buffers);
+
+      vldecoder->decoder->set_decode_buffer(vldecoder->decoder, vldecoder->buffer[vldecoder->cur_buffer]);
+      vldecoder->decoder->set_decode_target(vldecoder->decoder, vlsurf->video_buffer);
+
+      return vlVdpDecoderRenderMpeg12(vldecoder->decoder, (VdpPictureInfoMPEG1Or2 *)picture_info,
+                                      bitstream_buffer_count, bitstream_buffers);
       break;
 
    default:
diff --git a/src/gallium/state_trackers/vdpau/vdpau_private.h b/src/gallium/state_trackers/vdpau/vdpau_private.h
index e5d945629fb..5c68cd7c47a 100644
--- a/src/gallium/state_trackers/vdpau/vdpau_private.h
+++ b/src/gallium/state_trackers/vdpau/vdpau_private.h
@@ -256,7 +256,7 @@ typedef struct
 {
    vlVdpDevice *device;
    struct pipe_video_decoder *decoder;
-   struct pipe_video_decode_buffer *buffer[VL_NUM_DECODE_BUFFERS];
+   void *buffer[VL_NUM_DECODE_BUFFERS];
    unsigned cur_buffer;
 } vlVdpDecoder;
 
diff --git a/src/gallium/state_trackers/xorg/xvmc/surface.c b/src/gallium/state_trackers/xorg/xvmc/surface.c
index 0c53b730287..002c35ae445 100644
--- a/src/gallium/state_trackers/xorg/xvmc/surface.c
+++ b/src/gallium/state_trackers/xorg/xvmc/surface.c
@@ -252,9 +252,37 @@ MacroBlocksToPipe(XvMCSurfacePrivate *surface,
 }
 
 static void
-unmap_and_flush_surface(XvMCSurfacePrivate *surface)
+SetDecoderStatus(XvMCSurfacePrivate *surface)
 {
+   struct pipe_video_decoder *decoder;
    struct pipe_video_buffer *ref_frames[2];
+
+   XvMCContextPrivate *context_priv;
+
+   unsigned i, num_refs = 0;
+
+   assert(surface);
+
+   context_priv = surface->context->privData;
+   decoder = context_priv->decoder;
+
+   decoder->set_decode_buffer(decoder, surface->decode_buffer);
+   decoder->set_decode_target(decoder, surface->video_buffer);
+
+   for (i = 0; i < 2; ++i) {
+      if (surface->ref[i].surface) {
+         XvMCSurfacePrivate *ref = surface->ref[i].surface->privData;
+
+         if (ref)
+            ref_frames[num_refs++] = ref->video_buffer;
+      }
+   }
+   decoder->set_reference_frames(decoder, ref_frames, num_refs);
+}
+
+static void
+RecursiveEndFrame(XvMCSurfacePrivate *surface)
+{
    XvMCContextPrivate *context_priv;
    unsigned i, num_ycbcr_blocks[3];
 
@@ -264,27 +292,27 @@ unmap_and_flush_surface(XvMCSurfacePrivate *surface)
 
    for ( i = 0; i < 2; ++i ) {
       if (surface->ref[i].surface) {
-         XvMCSurfacePrivate *ref = surface->ref[i].surface->privData;
+         XvMCSurface *ref = surface->ref[i].surface;
 
         assert(ref);
 
-         unmap_and_flush_surface(ref);
         surface->ref[i].surface = NULL;
-         ref_frames[i] = ref->video_buffer;
-      } else {
-         ref_frames[i] = NULL;
+         RecursiveEndFrame(ref->privData);
+         surface->ref[i].surface = ref;
       }
    }
 
-   if (surface->mapped) {
-      surface->decode_buffer->end_frame(surface->decode_buffer);
+   if (surface->frame_started) {
+      surface->frame_started = 0;
+      SetDecoderStatus(surface);
+
      for (i = 0; i < 3; ++i)
         num_ycbcr_blocks[i] = surface->ycbcr[i].num_blocks_added;
-      context_priv->decoder->flush_buffer(surface->decode_buffer,
-                                          num_ycbcr_blocks,
-                                          ref_frames,
-                                          surface->video_buffer);
-      surface->mapped = 0;
+
+      for (i = 0; i < 2; ++i)
+         surface->ref[i].surface = NULL;
+
+      context_priv->decoder->end_frame(context_priv->decoder, num_ycbcr_blocks);
    }
 }
 
@@ -323,9 +351,7 @@ Status XvMCCreateSurface(Display *dpy, XvMCContext *context, XvMCSurface *surfac
       return BadAlloc;
 
    surface_priv->decode_buffer = context_priv->decoder->create_buffer(context_priv->decoder);
-   surface_priv->decode_buffer->set_quant_matrix(surface_priv->decode_buffer, dummy_quant, dummy_quant);
-
-   surface_priv->mv_stride = surface_priv->decode_buffer->get_mv_stream_stride(surface_priv->decode_buffer);
+   context_priv->decoder->set_quant_matrix(context_priv->decoder, dummy_quant, dummy_quant);
 
    surface_priv->video_buffer = pipe->create_video_buffer
    (
      pipe, PIPE_FORMAT_NV12, context_priv->decoder->chroma_format,
@@ -355,8 +381,9 @@ Status XvMCRenderSurface(Display *dpy, XvMCContext *context, unsigned int pictur
                            XvMCMacroBlockArray *macroblocks, XvMCBlockArray *blocks
 )
 {
-   struct pipe_video_decode_buffer *t_buffer;
+   struct pipe_video_decoder *decoder;
+
    XvMCContextPrivate *context_priv;
    XvMCSurfacePrivate *target_surface_priv;
    XvMCSurfacePrivate *past_surface_priv;
    XvMCSurfacePrivate *future_surface_priv;
@@ -394,6 +421,9 @@ Status XvMCRenderSurface(Display *dpy, XvMCContext *context, unsigned int pictur
 
    assert(flags == 0 || flags == XVMC_SECOND_FIELD);
 
+   context_priv = context->privData;
+   decoder = context_priv->decoder;
+
    target_surface_priv = target_surface->privData;
    past_surface_priv = past_surface ? past_surface->privData : NULL;
    future_surface_priv = future_surface ? future_surface->privData : NULL;
@@ -402,47 +432,48 @@ Status XvMCRenderSurface(Display *dpy, XvMCContext *context, unsigned int pictur
    assert(target_surface_priv->context == context);
    assert(!past_surface || past_surface_priv->context == context);
    assert(!future_surface || future_surface_priv->context == context);
-   t_buffer = target_surface_priv->decode_buffer;
-
-   // enshure that all reference frames are flushed
-   // not really nessasary, but speeds ups rendering
+   // call end frame on all referenced frames
    if (past_surface)
-      unmap_and_flush_surface(past_surface->privData);
+      RecursiveEndFrame(past_surface->privData);
 
    if (future_surface)
-      unmap_and_flush_surface(future_surface->privData);
+      RecursiveEndFrame(future_surface->privData);
 
    xvmc_mb = macroblocks->macro_blocks + first_macroblock;
 
    /* If the surface we're rendering hasn't changed the ref frames shouldn't change.
    */
-   if (target_surface_priv->mapped && (
+   if (target_surface_priv->frame_started && (
        target_surface_priv->ref[0].surface != past_surface ||
       target_surface_priv->ref[1].surface != future_surface ||
       (xvmc_mb->x == 0 && xvmc_mb->y == 0))) {
-      // If they change anyway we need to clear our surface
-      unmap_and_flush_surface(target_surface_priv);
+      // If they change anyway we must assume that the current frame is ended
+      RecursiveEndFrame(target_surface_priv);
   }
 
-   if (!target_surface_priv->mapped) {
-      t_buffer->begin_frame(t_buffer);
+   target_surface_priv->ref[0].surface = past_surface;
+   target_surface_priv->ref[1].surface = future_surface;
+
+   SetDecoderStatus(target_surface_priv);
+
+   if (!target_surface_priv->frame_started) {
+      decoder->begin_frame(decoder);
+      target_surface_priv->mv_stride = decoder->get_mv_stream_stride(decoder);
 
      for (i = 0; i < 3; ++i) {
        target_surface_priv->ycbcr[i].num_blocks_added = 0;
-         target_surface_priv->ycbcr[i].stream = t_buffer->get_ycbcr_stream(t_buffer, i);
-         target_surface_priv->ycbcr[i].buffer = t_buffer->get_ycbcr_buffer(t_buffer, i);
+         target_surface_priv->ycbcr[i].stream = decoder->get_ycbcr_stream(decoder, i);
+         target_surface_priv->ycbcr[i].buffer = decoder->get_ycbcr_buffer(decoder, i);
      }
 
      for (i = 0; i < 2; ++i) {
-         target_surface_priv->ref[i].surface = i == 0 ? past_surface : future_surface;
-
        if (target_surface_priv->ref[i].surface)
-            target_surface_priv->ref[i].mv = t_buffer->get_mv_stream(t_buffer, i);
+            target_surface_priv->ref[i].mv = decoder->get_mv_stream(decoder, i);
        else
           target_surface_priv->ref[i].mv = NULL;
      }
 
-      target_surface_priv->mapped = 1;
+      target_surface_priv->frame_started = 1;
   }
 
   MacroBlocksToPipe(target_surface_priv, picture_structure, xvmc_mb, blocks, num_macroblocks);
@@ -543,7 +574,9 @@ Status XvMCPutSurface(Display *dpy, XvMCSurface *surface, Drawable drawable,
    assert(desty + desth - 1 < drawable_surface->height);
    */
 
-   unmap_and_flush_surface(surface_priv);
+   RecursiveEndFrame(surface_priv);
+
+   context_priv->decoder->flush(context_priv->decoder);
 
    vl_compositor_clear_layers(compositor);
    vl_compositor_set_buffer_layer(compositor, 0, surface_priv->video_buffer, &src_rect, NULL);
@@ -630,6 +663,9 @@ PUBLIC
 Status XvMCDestroySurface(Display *dpy, XvMCSurface *surface)
 {
    XvMCSurfacePrivate *surface_priv;
+   XvMCContextPrivate *context_priv;
+
+   unsigned num_ycbcr_buffers[3] = { 0, 0, 0 };
 
    XVMC_MSG(XVMC_TRACE, "[XvMC] Destroying surface %p.\n", surface);
 
@@ -639,10 +675,13 @@ Status XvMCDestroySurface(Display *dpy, XvMCSurface *surface)
       return XvMCBadSurface;
 
    surface_priv = surface->privData;
+   context_priv = surface_priv->context->privData;
 
-   if (surface_priv->mapped)
-      surface_priv->decode_buffer->end_frame(surface_priv->decode_buffer);
-   surface_priv->decode_buffer->destroy(surface_priv->decode_buffer);
+   if (surface_priv->frame_started) {
+      SetDecoderStatus(surface_priv);
+      context_priv->decoder->end_frame(context_priv->decoder, num_ycbcr_buffers);
+   }
+   context_priv->decoder->destroy_buffer(context_priv->decoder, surface_priv->decode_buffer);
    surface_priv->video_buffer->destroy(surface_priv->video_buffer);
    FREE(surface_priv);
    surface->privData = NULL;
diff --git a/src/gallium/state_trackers/xorg/xvmc/xvmc_private.h b/src/gallium/state_trackers/xorg/xvmc/xvmc_private.h
index 5f8d9d13cb3..5b3debdb78b 100644
--- a/src/gallium/state_trackers/xorg/xvmc/xvmc_private.h
+++ b/src/gallium/state_trackers/xorg/xvmc/xvmc_private.h
@@ -45,7 +45,6 @@
 struct vl_context;
 
 struct pipe_video_decoder;
-struct pipe_video_decode_buffer;
 struct pipe_video_buffer;
 struct pipe_sampler_view;
 
@@ -70,10 +69,11 @@ typedef struct
 
 typedef struct
 {
-   struct pipe_video_decode_buffer *decode_buffer;
+   void *decode_buffer;
    struct pipe_video_buffer *video_buffer;
 
-   bool mapped; // are we still mapped to memory?
+   // have we allready told the decoder to start a frame
+   bool frame_started;
 
   struct {
      unsigned num_blocks_added;
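
For orientation, the decode path implied by the hunks above boils down to the call sequence sketched below. This is a minimal sketch, not code from the commit: the decode_one_frame() helper, the include path and the exact parameter types are assumptions made for illustration; only the callback names on struct pipe_video_decoder and their argument order are taken from the diff.

/* Sketch of the reworked gallium video decode flow (assumptions marked). */
#include <stdint.h>
#include "pipe/p_video_decoder.h"   /* assumed header providing struct pipe_video_decoder */

static void
decode_one_frame(struct pipe_video_decoder *decoder,
                 void *decode_buffer,                 /* opaque handle from decoder->create_buffer() */
                 struct pipe_video_buffer *target,    /* surface to decode into */
                 struct pipe_video_buffer **refs, unsigned num_refs,
                 struct pipe_picture_desc *picture,   /* e.g. &pipe_mpeg12_picture_desc.base (assumed type) */
                 const uint8_t *intra_quant, const uint8_t *non_intra_quant,
                 const void *bitstream, unsigned num_bytes)
{
   unsigned num_ycbcr_blocks[3] = { 0, 0, 0 };

   /* 1. bind the per-frame scratch buffer and the destination surface */
   decoder->set_decode_buffer(decoder, decode_buffer);
   decoder->set_decode_target(decoder, target);

   /* 2. per-picture state: references, picture parameters, quant matrices */
   decoder->set_reference_frames(decoder, refs, num_refs);
   decoder->set_picture_parameters(decoder, picture);
   decoder->set_quant_matrix(decoder, intra_quant, non_intra_quant);

   /* 3. frame bracket: begin_frame .. decode_bitstream .. end_frame */
   decoder->begin_frame(decoder);
   decoder->decode_bitstream(decoder, num_bytes, bitstream, num_ycbcr_blocks);
   decoder->end_frame(decoder, num_ycbcr_blocks);

   /* 4. kick off the actual rendering; XvMCPutSurface does this before presenting */
   decoder->flush(decoder);
}

The key design change visible in the diff is that the former pipe_video_decode_buffer object disappears from the state-tracker side: the buffer becomes an opaque void* handle bound via set_decode_buffer(), and all per-frame state and the begin/decode/end bracket go through struct pipe_video_decoder itself.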