vdr-plugin-softhddevice/0000755000175000017500000000000012644034136015141 5ustar tobiastobiasvdr-plugin-softhddevice/.indent.pro0000644000175000017500000000163412644034136017226 0ustar tobiastobias--blank-lines-before-block-comments --blank-lines-after-declarations --blank-lines-after-procedures --no-blank-lines-after-commas --braces-on-if-line --no-blank-before-sizeof --comment-indentation41 --declaration-comment-column41 --no-comment-delimiters-on-blank-lines --swallow-optional-blank-lines --dont-format-comments --parameter-indentation4 --indent-level4 --line-comments-indentation0 --cuddle-else --cuddle-do-while --brace-indent0 --case-brace-indentation0 //--start-left-side-of-comments --leave-preprocessor-space //--continuation-indentation8 --case-indentation4 --else-endif-column0 --no-space-after-casts --declaration-indentation1 --dont-line-up-parentheses --no-space-after-function-call-names --space-special-semicolon --tab-size8 --use-tabs --line-length79 --comment-line-length79 --honour-newlines --dont-break-procedure-type --break-before-boolean-operator --continuation-indentation4 --ignore-newlines vdr-plugin-softhddevice/codec.c0000644000175000017500000016026412644034136016373 0ustar tobiastobias/// /// @file codec.c @brief Codec functions /// /// Copyright (c) 2009 - 2015 by Johns. All Rights Reserved. /// /// Contributor(s): /// /// License: AGPLv3 /// /// This program is free software: you can redistribute it and/or modify /// it under the terms of the GNU Affero General Public License as /// published by the Free Software Foundation, either version 3 of the /// License. /// /// This program is distributed in the hope that it will be useful, /// but WITHOUT ANY WARRANTY; without even the implied warranty of /// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the /// GNU Affero General Public License for more details. 
/// /// $Id: 80e63bf6670616eafd90f139ba497cee6cbbf683 $ ////////////////////////////////////////////////////////////////////////////// /// /// @defgroup Codec The codec module. /// /// This module contains all decoder and codec functions. /// It is uses ffmpeg (http://ffmpeg.org) as backend. /// /// It may work with libav (http://libav.org), but the tests show /// many bugs and incompatiblity in it. Don't use this shit. /// /// compile with pass-through support (stable, AC-3, E-AC-3 only) #define USE_PASSTHROUGH /// compile audio drift correction support (very experimental) #define USE_AUDIO_DRIFT_CORRECTION /// compile AC-3 audio drift correction support (very experimental) #define USE_AC3_DRIFT_CORRECTION /// use ffmpeg libswresample API (autodected, Makefile) #define noUSE_SWRESAMPLE /// use libav libavresample API (autodected, Makefile) #define noUSE_AVRESAMPLE #include #include #include #include #ifdef __FreeBSD__ #include #else #include #endif #include #include #include #include #define _(str) gettext(str) ///< gettext shortcut #define _N(str) str ///< gettext_noop shortcut #include #include // support old ffmpeg versions <1.0 #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(55,18,102) #define AVCodecID CodecID #define AV_CODEC_ID_AC3 CODEC_ID_AC3 #define AV_CODEC_ID_EAC3 CODEC_ID_EAC3 #define AV_CODEC_ID_MPEG2VIDEO CODEC_ID_MPEG2VIDEO #define AV_CODEC_ID_H264 CODEC_ID_H264 #endif #include #ifdef USE_VDPAU #include #endif #ifdef USE_SWRESAMPLE #include #endif #ifdef USE_AVRESAMPLE #include #include #endif #ifndef __USE_GNU #define __USE_GNU #endif #include #ifdef MAIN_H #include MAIN_H #endif #include "iatomic.h" #include "misc.h" #include "video.h" #include "audio.h" #include "codec.h" //---------------------------------------------------------------------------- // correct is AV_VERSION_INT(56,35,101) but some gentoo i* think // they must change it. 
#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(56,26,100) /// ffmpeg 2.6 started to show artifacts after channel switch /// to SDTV channels #define FFMPEG_WORKAROUND_ARTIFACTS 1 #endif //---------------------------------------------------------------------------- // Global //---------------------------------------------------------------------------- /// /// ffmpeg lock mutex /// /// new ffmpeg dislikes simultanous open/close /// this breaks our code, until this is fixed use lock. /// static pthread_mutex_t CodecLockMutex; /// Flag prefer fast channel switch char CodecUsePossibleDefectFrames; //---------------------------------------------------------------------------- // Video //---------------------------------------------------------------------------- #if 0 /// /// Video decoder typedef. /// //typedef struct _video_decoder_ Decoder; #endif /// /// Video decoder structure. /// struct _video_decoder_ { VideoHwDecoder *HwDecoder; ///< video hardware decoder int GetFormatDone; ///< flag get format called! AVCodec *VideoCodec; ///< video codec AVCodecContext *VideoCtx; ///< video codec context #ifdef FFMPEG_WORKAROUND_ARTIFACTS int FirstKeyFrame; ///< flag first frame #endif AVFrame *Frame; ///< decoded video frame }; //---------------------------------------------------------------------------- // Call-backs //---------------------------------------------------------------------------- /** ** Callback to negotiate the PixelFormat. ** ** @param video_ctx codec context ** @param fmt is the list of formats which are supported by ** the codec, it is terminated by -1 as 0 is a ** valid format, the formats are ordered by ** quality. 
*/ static enum PixelFormat Codec_get_format(AVCodecContext * video_ctx, const enum PixelFormat *fmt) { VideoDecoder *decoder; decoder = video_ctx->opaque; #if LIBAVCODEC_VERSION_INT == AV_VERSION_INT(54,86,100) // this begins to stink, 1.1.2 calls get_format for each frame // 1.1.3 has the same version, but works again if (decoder->GetFormatDone) { if (decoder->GetFormatDone < 10) { ++decoder->GetFormatDone; Error ("codec/video: ffmpeg/libav buggy: get_format called again\n"); } return *fmt; // FIXME: this is hack } #endif // bug in ffmpeg 1.1.1, called with zero width or height if (!video_ctx->width || !video_ctx->height) { Error("codec/video: ffmpeg/libav buggy: width or height zero\n"); } decoder->GetFormatDone = 1; return Video_get_format(decoder->HwDecoder, video_ctx, fmt); } /** ** Video buffer management, get buffer for frame. ** ** Called at the beginning of each frame to get a buffer for it. ** ** @param video_ctx Codec context ** @param frame Get buffer for this frame */ static int Codec_get_buffer(AVCodecContext * video_ctx, AVFrame * frame) { VideoDecoder *decoder; decoder = video_ctx->opaque; #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(54,86,100) // ffmpeg has this already fixed // libav 0.8.5 53.35.0 still needs this #endif if (!decoder->GetFormatDone) { // get_format missing enum PixelFormat fmts[2]; fprintf(stderr, "codec: buggy libav, use ffmpeg\n"); Warning(_("codec: buggy libav, use ffmpeg\n")); fmts[0] = video_ctx->pix_fmt; fmts[1] = PIX_FMT_NONE; Codec_get_format(video_ctx, fmts); } #ifdef USE_VDPAU // VDPAU: PIX_FMT_VDPAU_H264 .. 
PIX_FMT_VDPAU_VC1 PIX_FMT_VDPAU_MPEG4 if ((PIX_FMT_VDPAU_H264 <= video_ctx->pix_fmt && video_ctx->pix_fmt <= PIX_FMT_VDPAU_VC1) || video_ctx->pix_fmt == PIX_FMT_VDPAU_MPEG4) { unsigned surface; struct vdpau_render_state *vrs; surface = VideoGetSurface(decoder->HwDecoder, video_ctx); vrs = av_mallocz(sizeof(struct vdpau_render_state)); vrs->surface = surface; //Debug(3, "codec: use surface %#010x\n", surface); #if LIBAVUTIL_VERSION_INT < AV_VERSION_INT(52,48,101) frame->type = FF_BUFFER_TYPE_USER; #endif #if LIBAVCODEC_VERSION_INT <= AV_VERSION_INT(53,46,0) frame->age = 256 * 256 * 256 * 64; #endif // render frame->data[0] = (void *)vrs; frame->data[1] = NULL; frame->data[2] = NULL; frame->data[3] = NULL; #if LIBAVUTIL_VERSION_INT < AV_VERSION_INT(52,66,100) // reordered frames if (video_ctx->pkt) { frame->pkt_pts = video_ctx->pkt->pts; } else { frame->pkt_pts = AV_NOPTS_VALUE; } #endif return 0; } #endif // VA-API: if (video_ctx->hwaccel_context) { unsigned surface; surface = VideoGetSurface(decoder->HwDecoder, video_ctx); //Debug(3, "codec: use surface %#010x\n", surface); #if LIBAVUTIL_VERSION_INT < AV_VERSION_INT(52,48,101) frame->type = FF_BUFFER_TYPE_USER; #endif #if LIBAVCODEC_VERSION_INT <= AV_VERSION_INT(53,46,0) frame->age = 256 * 256 * 256 * 64; #endif // vaapi needs both fields set frame->data[0] = (void *)(size_t) surface; frame->data[3] = (void *)(size_t) surface; #if LIBAVUTIL_VERSION_INT < AV_VERSION_INT(52,66,100) // reordered frames if (video_ctx->pkt) { frame->pkt_pts = video_ctx->pkt->pts; } else { frame->pkt_pts = AV_NOPTS_VALUE; } #endif return 0; } //Debug(3, "codec: fallback to default get_buffer\n"); return avcodec_default_get_buffer(video_ctx, frame); } /** ** Video buffer management, release buffer for frame. ** Called to release buffers which were allocated with get_buffer. 
** ** @param video_ctx Codec context ** @param frame Release buffer for this frame */ static void Codec_release_buffer(AVCodecContext * video_ctx, AVFrame * frame) { #ifdef USE_VDPAU // VDPAU: PIX_FMT_VDPAU_H264 .. PIX_FMT_VDPAU_VC1 PIX_FMT_VDPAU_MPEG4 if ((PIX_FMT_VDPAU_H264 <= video_ctx->pix_fmt && video_ctx->pix_fmt <= PIX_FMT_VDPAU_VC1) || video_ctx->pix_fmt == PIX_FMT_VDPAU_MPEG4) { VideoDecoder *decoder; struct vdpau_render_state *vrs; unsigned surface; decoder = video_ctx->opaque; vrs = (struct vdpau_render_state *)frame->data[0]; surface = vrs->surface; //Debug(3, "codec: release surface %#010x\n", surface); VideoReleaseSurface(decoder->HwDecoder, surface); av_freep(&vrs->bitstream_buffers); vrs->bitstream_buffers_allocated = 0; av_freep(&frame->data[0]); return; } #endif // VA-API if (video_ctx->hwaccel_context) { VideoDecoder *decoder; unsigned surface; decoder = video_ctx->opaque; surface = (unsigned)(size_t) frame->data[3]; //Debug(3, "codec: release surface %#010x\n", surface); VideoReleaseSurface(decoder->HwDecoder, surface); frame->data[0] = NULL; frame->data[3] = NULL; return; } //Debug(3, "codec: fallback to default release_buffer\n"); return avcodec_default_release_buffer(video_ctx, frame); } /// libav: compatibility hack #ifndef AV_NUM_DATA_POINTERS #define AV_NUM_DATA_POINTERS 4 #endif /** ** Draw a horizontal band. ** ** @param video_ctx Codec context ** @param frame draw this frame ** @param y y position of slice ** @param type 1->top field, 2->bottom field, 3->frame ** @param offset offset into AVFrame.data from which slice ** should be read ** @param height height of slice */ static void Codec_draw_horiz_band(AVCodecContext * video_ctx, const AVFrame * frame, __attribute__ ((unused)) int offset[AV_NUM_DATA_POINTERS], __attribute__ ((unused)) int y, __attribute__ ((unused)) int type, __attribute__ ((unused)) int height) { #ifdef USE_VDPAU // VDPAU: PIX_FMT_VDPAU_H264 .. 
PIX_FMT_VDPAU_VC1 PIX_FMT_VDPAU_MPEG4 if ((PIX_FMT_VDPAU_H264 <= video_ctx->pix_fmt && video_ctx->pix_fmt <= PIX_FMT_VDPAU_VC1) || video_ctx->pix_fmt == PIX_FMT_VDPAU_MPEG4) { VideoDecoder *decoder; struct vdpau_render_state *vrs; //unsigned surface; decoder = video_ctx->opaque; vrs = (struct vdpau_render_state *)frame->data[0]; //surface = vrs->surface; //Debug(3, "codec: draw slice surface %#010x\n", surface); //Debug(3, "codec: %d references\n", vrs->info.h264.num_ref_frames); VideoDrawRenderState(decoder->HwDecoder, vrs); return; } #else (void)video_ctx; (void)frame; #endif } //---------------------------------------------------------------------------- // Test //---------------------------------------------------------------------------- /** ** Allocate a new video decoder context. ** ** @param hw_decoder video hardware decoder ** ** @returns private decoder pointer for video decoder. */ VideoDecoder *CodecVideoNewDecoder(VideoHwDecoder * hw_decoder) { VideoDecoder *decoder; if (!(decoder = calloc(1, sizeof(*decoder)))) { Fatal(_("codec: can't allocate vodeo decoder\n")); } decoder->HwDecoder = hw_decoder; return decoder; } /** ** Deallocate a video decoder context. ** ** @param decoder private video decoder */ void CodecVideoDelDecoder(VideoDecoder * decoder) { free(decoder); } /** ** Open video decoder. ** ** @param decoder private video decoder ** @param codec_id video codec id */ void CodecVideoOpen(VideoDecoder * decoder, int codec_id) { AVCodec *video_codec; const char *name; Debug(3, "codec: using video codec ID %#06x (%s)\n", codec_id, avcodec_get_name(codec_id)); if (decoder->VideoCtx) { Error(_("codec: missing close\n")); } #if 1 // FIXME: old vdpau API: should be updated to new API name = NULL; if (!strcasecmp(VideoGetDriverName(), "vdpau")) { switch (codec_id) { case AV_CODEC_ID_MPEG2VIDEO: name = VideoHardwareDecoder < 0 ? "mpegvideo_vdpau" : NULL; break; case AV_CODEC_ID_H264: name = VideoHardwareDecoder ? 
"h264_vdpau" : NULL; break; } } if (name && (video_codec = avcodec_find_decoder_by_name(name))) { Debug(3, "codec: vdpau decoder found\n"); } else #endif if (!(video_codec = avcodec_find_decoder(codec_id))) { Fatal(_("codec: codec ID %#06x not found\n"), codec_id); // FIXME: none fatal } decoder->VideoCodec = video_codec; if (!(decoder->VideoCtx = avcodec_alloc_context3(video_codec))) { Fatal(_("codec: can't allocate video codec context\n")); } // FIXME: for software decoder use all cpus, otherwise 1 decoder->VideoCtx->thread_count = 1; pthread_mutex_lock(&CodecLockMutex); // open codec #if LIBAVCODEC_VERSION_INT <= AV_VERSION_INT(53,5,0) if (avcodec_open(decoder->VideoCtx, video_codec) < 0) { pthread_mutex_unlock(&CodecLockMutex); Fatal(_("codec: can't open video codec!\n")); } #else if (video_codec->capabilities & (CODEC_CAP_HWACCEL_VDPAU | CODEC_CAP_HWACCEL)) { Debug(3, "codec: video mpeg hack active\n"); // HACK around badly placed checks in mpeg_mc_decode_init // taken from mplayer vd_ffmpeg.c decoder->VideoCtx->slice_flags = SLICE_FLAG_CODED_ORDER | SLICE_FLAG_ALLOW_FIELD; decoder->VideoCtx->thread_count = 1; decoder->VideoCtx->active_thread_type = 0; } if (avcodec_open2(decoder->VideoCtx, video_codec, NULL) < 0) { pthread_mutex_unlock(&CodecLockMutex); Fatal(_("codec: can't open video codec!\n")); } #endif pthread_mutex_unlock(&CodecLockMutex); decoder->VideoCtx->opaque = decoder; // our structure Debug(3, "codec: video '%s'\n", decoder->VideoCodec->long_name); if (codec_id == AV_CODEC_ID_H264) { // 2.53 Ghz CPU is too slow for this codec at 1080i //decoder->VideoCtx->skip_loop_filter = AVDISCARD_ALL; //decoder->VideoCtx->skip_loop_filter = AVDISCARD_BIDIR; } if (video_codec->capabilities & CODEC_CAP_TRUNCATED) { Debug(3, "codec: video can use truncated packets\n"); #ifndef USE_MPEG_COMPLETE // we send incomplete frames, for old PES recordings // this breaks the decoder for some stations decoder->VideoCtx->flags |= CODEC_FLAG_TRUNCATED; #endif } // FIXME: 
own memory management for video frames. if (video_codec->capabilities & CODEC_CAP_DR1) { Debug(3, "codec: can use own buffer management\n"); } if (video_codec->capabilities & CODEC_CAP_HWACCEL_VDPAU) { Debug(3, "codec: can export data for HW decoding (VDPAU)\n"); } #ifdef CODEC_CAP_FRAME_THREADS if (video_codec->capabilities & CODEC_CAP_FRAME_THREADS) { Debug(3, "codec: codec supports frame threads\n"); } #endif //decoder->VideoCtx->debug = FF_DEBUG_STARTCODE; //decoder->VideoCtx->err_recognition |= AV_EF_EXPLODE; if (video_codec->capabilities & CODEC_CAP_HWACCEL_VDPAU) { // FIXME: get_format never called. decoder->VideoCtx->get_format = Codec_get_format; decoder->VideoCtx->get_buffer = Codec_get_buffer; decoder->VideoCtx->release_buffer = Codec_release_buffer; decoder->VideoCtx->reget_buffer = Codec_get_buffer; decoder->VideoCtx->draw_horiz_band = Codec_draw_horiz_band; decoder->VideoCtx->slice_flags = SLICE_FLAG_CODED_ORDER | SLICE_FLAG_ALLOW_FIELD; decoder->VideoCtx->thread_count = 1; decoder->VideoCtx->active_thread_type = 0; } else { decoder->VideoCtx->get_format = Codec_get_format; decoder->VideoCtx->hwaccel_context = VideoGetHwAccelContext(decoder->HwDecoder); } // our pixel format video hardware decoder hook if (decoder->VideoCtx->hwaccel_context) { decoder->VideoCtx->get_format = Codec_get_format; decoder->VideoCtx->get_buffer = Codec_get_buffer; decoder->VideoCtx->release_buffer = Codec_release_buffer; decoder->VideoCtx->reget_buffer = Codec_get_buffer; #if 0 decoder->VideoCtx->thread_count = 1; decoder->VideoCtx->draw_horiz_band = NULL; decoder->VideoCtx->slice_flags = SLICE_FLAG_CODED_ORDER | SLICE_FLAG_ALLOW_FIELD; //decoder->VideoCtx->flags |= CODEC_FLAG_EMU_EDGE; #endif } // // Prepare frame buffer for decoder // #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(56,28,1) if (!(decoder->Frame = av_frame_alloc())) { Fatal(_("codec: can't allocate video decoder frame buffer\n")); } #else if (!(decoder->Frame = avcodec_alloc_frame())) { Fatal(_("codec: can't 
allocate video decoder frame buffer\n")); } #endif // reset buggy ffmpeg/libav flag decoder->GetFormatDone = 0; #ifdef FFMPEG_WORKAROUND_ARTIFACTS decoder->FirstKeyFrame = 1; #endif } /** ** Close video decoder. ** ** @param video_decoder private video decoder */ void CodecVideoClose(VideoDecoder * video_decoder) { // FIXME: play buffered data #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(56,28,1) av_frame_free(&video_decoder->Frame); // callee does checks #else av_freep(&video_decoder->Frame); #endif if (video_decoder->VideoCtx) { pthread_mutex_lock(&CodecLockMutex); avcodec_close(video_decoder->VideoCtx); av_freep(&video_decoder->VideoCtx); pthread_mutex_unlock(&CodecLockMutex); } } #if 0 /** ** Display pts... ** ** ffmpeg-0.9 pts always AV_NOPTS_VALUE ** ffmpeg-0.9 pkt_pts nice monotonic (only with HD) ** ffmpeg-0.9 pkt_dts wild jumping -160 - 340 ms ** ** libav 0.8_pre20111116 pts always AV_NOPTS_VALUE ** libav 0.8_pre20111116 pkt_pts always 0 (could be fixed?) ** libav 0.8_pre20111116 pkt_dts wild jumping -160 - 340 ms */ void DisplayPts(AVCodecContext * video_ctx, AVFrame * frame) { int ms_delay; int64_t pts; static int64_t last_pts; pts = frame->pkt_pts; if (pts == (int64_t) AV_NOPTS_VALUE) { printf("*"); } ms_delay = (1000 * video_ctx->time_base.num) / video_ctx->time_base.den; ms_delay += frame->repeat_pict * ms_delay / 2; printf("codec: PTS %s%s %" PRId64 " %d %d/%d %dms\n", frame->repeat_pict ? "r" : " ", frame->interlaced_frame ? "I" : " ", pts, (int)(pts - last_pts) / 90, video_ctx->time_base.num, video_ctx->time_base.den, ms_delay); if (pts != (int64_t) AV_NOPTS_VALUE) { last_pts = pts; } } #endif /** ** Decode a video packet. 
** ** @param decoder video decoder data ** @param avpkt video packet */ void CodecVideoDecode(VideoDecoder * decoder, const AVPacket * avpkt) { AVCodecContext *video_ctx; AVFrame *frame; int used; int got_frame; AVPacket pkt[1]; video_ctx = decoder->VideoCtx; frame = decoder->Frame; *pkt = *avpkt; // use copy next_part: // FIXME: this function can crash with bad packets used = avcodec_decode_video2(video_ctx, frame, &got_frame, pkt); Debug(4, "%s: %p %d -> %d %d\n", __FUNCTION__, pkt->data, pkt->size, used, got_frame); if (used < 0) { Debug(3, "codec: bad video frame\n"); return; } if (got_frame) { // frame completed #ifdef FFMPEG_WORKAROUND_ARTIFACTS if (!CodecUsePossibleDefectFrames && decoder->FirstKeyFrame) { decoder->FirstKeyFrame++; if (frame->key_frame) { Debug(3, "codec: key frame after %d frames\n", decoder->FirstKeyFrame); decoder->FirstKeyFrame = 0; } } else { //DisplayPts(video_ctx, frame); VideoRenderFrame(decoder->HwDecoder, video_ctx, frame); } #else //DisplayPts(video_ctx, frame); VideoRenderFrame(decoder->HwDecoder, video_ctx, frame); #endif } else { // some frames are needed for references, interlaced frames ... // could happen with h264 dvb streams, just drop data. Debug(4, "codec: %8d incomplete interlaced frame %d bytes used\n", video_ctx->frame_number, used); } #if 1 // old code to support truncated or multi frame packets if (used != pkt->size) { // ffmpeg 0.8.7 dislikes our seq_end_h264 and enters endless loop here if (used == 0 && pkt->size == 5 && pkt->data[4] == 0x0A) { Warning("codec: ffmpeg 0.8.x workaround used\n"); return; } if (used >= 0 && used < pkt->size) { // some tv channels, produce this Debug(4, "codec: ooops didn't use complete video packet used %d of %d\n", used, pkt->size); pkt->size -= used; pkt->data += used; // FIXME: align problem? goto next_part; } } #endif // new AVFrame API #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(56,28,1) av_frame_unref(frame); #endif } /** ** Flush the video decoder. 
** ** @param decoder video decoder data */ void CodecVideoFlushBuffers(VideoDecoder * decoder) { if (decoder->VideoCtx) { avcodec_flush_buffers(decoder->VideoCtx); } } //---------------------------------------------------------------------------- // Audio //---------------------------------------------------------------------------- #if 0 /// /// Audio decoder typedef. /// typedef struct _audio_decoder_ AudioDecoder; #endif /// /// Audio decoder structure. /// struct _audio_decoder_ { AVCodec *AudioCodec; ///< audio codec AVCodecContext *AudioCtx; ///< audio codec context char Passthrough; ///< current pass-through flags int SampleRate; ///< current stream sample rate int Channels; ///< current stream channels int HwSampleRate; ///< hw sample rate int HwChannels; ///< hw channels #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(56,28,1) AVFrame *Frame; ///< decoded audio frame buffer #endif #if !defined(USE_SWRESAMPLE) && !defined(USE_AVRESAMPLE) ReSampleContext *ReSample; ///< old resampling context #endif #ifdef USE_SWRESAMPLE #if LIBSWRESAMPLE_VERSION_INT < AV_VERSION_INT(0, 15, 100) struct SwrContext *Resample; ///< ffmpeg software resample context #else SwrContext *Resample; ///< ffmpeg software resample context #endif #endif #ifdef USE_AVRESAMPLE AVAudioResampleContext *Resample; ///< libav software resample context #endif uint16_t Spdif[24576 / 2]; ///< SPDIF output buffer int SpdifIndex; ///< index into SPDIF output buffer int SpdifCount; ///< SPDIF repeat counter int64_t LastDelay; ///< last delay struct timespec LastTime; ///< last time int64_t LastPTS; ///< last PTS int Drift; ///< accumulated audio drift int DriftCorr; ///< audio drift correction value int DriftFrac; ///< audio drift fraction for ac3 #if !defined(USE_SWRESAMPLE) && !defined(USE_AVRESAMPLE) struct AVResampleContext *AvResample; ///< second audio resample context #define MAX_CHANNELS 8 ///< max number of channels supported int16_t *Buffer[MAX_CHANNELS]; ///< deinterleave sample buffers int 
BufferSize; ///< size of sample buffer int16_t *Remain[MAX_CHANNELS]; ///< filter remaining samples int RemainSize; ///< size of remain buffer int RemainCount; ///< number of remaining samples #endif }; /// /// IEC Data type enumeration. /// enum IEC61937 { IEC61937_AC3 = 0x01, ///< AC-3 data // FIXME: more data types IEC61937_EAC3 = 0x15, ///< E-AC-3 data }; #ifdef USE_AUDIO_DRIFT_CORRECTION #define CORRECT_PCM 1 ///< do PCM audio-drift correction #define CORRECT_AC3 2 ///< do AC-3 audio-drift correction static char CodecAudioDrift; ///< flag: enable audio-drift correction #else static const int CodecAudioDrift = 0; #endif #ifdef USE_PASSTHROUGH /// /// Pass-through flags: CodecPCM, CodecAC3, CodecEAC3, ... /// static char CodecPassthrough; #else static const int CodecPassthrough = 0; #endif static char CodecDownmix; ///< enable AC-3 decoder downmix /** ** Allocate a new audio decoder context. ** ** @returns private decoder pointer for audio decoder. */ AudioDecoder *CodecAudioNewDecoder(void) { AudioDecoder *audio_decoder; if (!(audio_decoder = calloc(1, sizeof(*audio_decoder)))) { Fatal(_("codec: can't allocate audio decoder\n")); } #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(56,28,1) if (!(audio_decoder->Frame = av_frame_alloc())) { Fatal(_("codec: can't allocate audio decoder frame buffer\n")); } #endif return audio_decoder; } /** ** Deallocate an audio decoder context. ** ** @param decoder private audio decoder */ void CodecAudioDelDecoder(AudioDecoder * decoder) { #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(56,28,1) av_frame_free(&decoder->Frame); // callee does checks #endif free(decoder); } /** ** Open audio decoder. 
** ** @param audio_decoder private audio decoder ** @param codec_id audio codec id */ void CodecAudioOpen(AudioDecoder * audio_decoder, int codec_id) { AVCodec *audio_codec; Debug(3, "codec: using audio codec ID %#06x (%s)\n", codec_id, avcodec_get_name(codec_id)); if (!(audio_codec = avcodec_find_decoder(codec_id))) { Fatal(_("codec: codec ID %#06x not found\n"), codec_id); // FIXME: errors aren't fatal } audio_decoder->AudioCodec = audio_codec; if (!(audio_decoder->AudioCtx = avcodec_alloc_context3(audio_codec))) { Fatal(_("codec: can't allocate audio codec context\n")); } if (CodecDownmix) { #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(53,61,100) audio_decoder->AudioCtx->request_channels = 2; #endif audio_decoder->AudioCtx->request_channel_layout = AV_CH_LAYOUT_STEREO_DOWNMIX; } #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53,61,100) // this has no effect (with ffmpeg and libav) // audio_decoder->AudioCtx->request_sample_fmt = AV_SAMPLE_FMT_S16; #endif pthread_mutex_lock(&CodecLockMutex); // open codec #if LIBAVCODEC_VERSION_INT <= AV_VERSION_INT(53,5,0) if (avcodec_open(audio_decoder->AudioCtx, audio_codec) < 0) { pthread_mutex_unlock(&CodecLockMutex); Fatal(_("codec: can't open audio codec\n")); } #else if (1) { AVDictionary *av_dict; av_dict = NULL; // FIXME: import settings //av_dict_set(&av_dict, "dmix_mode", "0", 0); //av_dict_set(&av_dict, "ltrt_cmixlev", "1.414", 0); //av_dict_set(&av_dict, "loro_cmixlev", "1.414", 0); if (avcodec_open2(audio_decoder->AudioCtx, audio_codec, &av_dict) < 0) { pthread_mutex_unlock(&CodecLockMutex); Fatal(_("codec: can't open audio codec\n")); } av_dict_free(&av_dict); } #endif pthread_mutex_unlock(&CodecLockMutex); Debug(3, "codec: audio '%s'\n", audio_decoder->AudioCodec->long_name); if (audio_codec->capabilities & CODEC_CAP_TRUNCATED) { Debug(3, "codec: audio can use truncated packets\n"); // we send only complete frames // audio_decoder->AudioCtx->flags |= CODEC_FLAG_TRUNCATED; } audio_decoder->SampleRate = 0; 
audio_decoder->Channels = 0; audio_decoder->HwSampleRate = 0; audio_decoder->HwChannels = 0; audio_decoder->LastDelay = 0; } /** ** Close audio decoder. ** ** @param audio_decoder private audio decoder */ void CodecAudioClose(AudioDecoder * audio_decoder) { // FIXME: output any buffered data #if !defined(USE_SWRESAMPLE) && !defined(USE_AVRESAMPLE) if (audio_decoder->AvResample) { int ch; av_resample_close(audio_decoder->AvResample); audio_decoder->AvResample = NULL; audio_decoder->RemainCount = 0; audio_decoder->BufferSize = 0; audio_decoder->RemainSize = 0; for (ch = 0; ch < MAX_CHANNELS; ++ch) { free(audio_decoder->Buffer[ch]); audio_decoder->Buffer[ch] = NULL; free(audio_decoder->Remain[ch]); audio_decoder->Remain[ch] = NULL; } } if (audio_decoder->ReSample) { audio_resample_close(audio_decoder->ReSample); audio_decoder->ReSample = NULL; } #endif #ifdef USE_SWRESAMPLE if (audio_decoder->Resample) { swr_free(&audio_decoder->Resample); } #endif #ifdef USE_AVRESAMPLE if (audio_decoder->Resample) { avresample_free(&audio_decoder->Resample); } #endif if (audio_decoder->AudioCtx) { pthread_mutex_lock(&CodecLockMutex); avcodec_close(audio_decoder->AudioCtx); av_freep(&audio_decoder->AudioCtx); pthread_mutex_unlock(&CodecLockMutex); } } /** ** Set audio drift correction. ** ** @param mask enable mask (PCM, AC-3) */ void CodecSetAudioDrift(int mask) { #ifdef USE_AUDIO_DRIFT_CORRECTION CodecAudioDrift = mask & (CORRECT_PCM | CORRECT_AC3); #endif (void)mask; } /** ** Set audio pass-through. ** ** @param mask enable mask (PCM, AC-3, E-AC-3) */ void CodecSetAudioPassthrough(int mask) { #ifdef USE_PASSTHROUGH CodecPassthrough = mask & (CodecPCM | CodecAC3 | CodecEAC3); #endif (void)mask; } /** ** Set audio downmix. ** ** @param onoff enable/disable downmix. */ void CodecSetAudioDownmix(int onoff) { if (onoff == -1) { CodecDownmix ^= 1; return; } CodecDownmix = onoff; } /** ** Reorder audio frame. 
** ** ffmpeg L R C Ls Rs -> alsa L R Ls Rs C ** ffmpeg L R C LFE Ls Rs -> alsa L R Ls Rs C LFE ** ffmpeg L R C LFE Ls Rs Rl Rr -> alsa L R Ls Rs C LFE Rl Rr ** ** @param buf[IN,OUT] sample buffer ** @param size size of sample buffer in bytes ** @param channels number of channels interleaved in sample buffer */ static void CodecReorderAudioFrame(int16_t * buf, int size, int channels) { int i; int c; int ls; int rs; int lfe; switch (channels) { case 5: size /= 2; for (i = 0; i < size; i += 5) { c = buf[i + 2]; ls = buf[i + 3]; rs = buf[i + 4]; buf[i + 2] = ls; buf[i + 3] = rs; buf[i + 4] = c; } break; case 6: size /= 2; for (i = 0; i < size; i += 6) { c = buf[i + 2]; lfe = buf[i + 3]; ls = buf[i + 4]; rs = buf[i + 5]; buf[i + 2] = ls; buf[i + 3] = rs; buf[i + 4] = c; buf[i + 5] = lfe; } break; case 8: size /= 2; for (i = 0; i < size; i += 8) { c = buf[i + 2]; lfe = buf[i + 3]; ls = buf[i + 4]; rs = buf[i + 5]; buf[i + 2] = ls; buf[i + 3] = rs; buf[i + 4] = c; buf[i + 5] = lfe; } break; } } /** ** Handle audio format changes helper. ** ** @param audio_decoder audio decoder data ** @param[out] passthrough pass-through output */ static int CodecAudioUpdateHelper(AudioDecoder * audio_decoder, int *passthrough) { const AVCodecContext *audio_ctx; int err; audio_ctx = audio_decoder->AudioCtx; Debug(3, "codec/audio: format change %s %dHz *%d channels%s%s%s%s%s\n", av_get_sample_fmt_name(audio_ctx->sample_fmt), audio_ctx->sample_rate, audio_ctx->channels, CodecPassthrough & CodecPCM ? " PCM" : "", CodecPassthrough & CodecMPA ? " MPA" : "", CodecPassthrough & CodecAC3 ? " AC-3" : "", CodecPassthrough & CodecEAC3 ? " E-AC-3" : "", CodecPassthrough ? 
" pass-through" : ""); *passthrough = 0; audio_decoder->SampleRate = audio_ctx->sample_rate; audio_decoder->HwSampleRate = audio_ctx->sample_rate; audio_decoder->Channels = audio_ctx->channels; audio_decoder->HwChannels = audio_ctx->channels; audio_decoder->Passthrough = CodecPassthrough; // SPDIF/HDMI pass-through if ((CodecPassthrough & CodecAC3 && audio_ctx->codec_id == AV_CODEC_ID_AC3) || (CodecPassthrough & CodecEAC3 && audio_ctx->codec_id == AV_CODEC_ID_EAC3)) { if (audio_ctx->codec_id == AV_CODEC_ID_EAC3) { // E-AC-3 over HDMI some receivers need HBR audio_decoder->HwSampleRate *= 4; } audio_decoder->HwChannels = 2; audio_decoder->SpdifIndex = 0; // reset buffer audio_decoder->SpdifCount = 0; *passthrough = 1; } // channels/sample-rate not support? if ((err = AudioSetup(&audio_decoder->HwSampleRate, &audio_decoder->HwChannels, *passthrough))) { // try E-AC-3 none HBR audio_decoder->HwSampleRate /= 4; if (audio_ctx->codec_id != AV_CODEC_ID_EAC3 || (err = AudioSetup(&audio_decoder->HwSampleRate, &audio_decoder->HwChannels, *passthrough))) { Debug(3, "codec/audio: audio setup error\n"); // FIXME: handle errors audio_decoder->HwChannels = 0; audio_decoder->HwSampleRate = 0; return err; } } Debug(3, "codec/audio: resample %s %dHz *%d -> %s %dHz *%d\n", av_get_sample_fmt_name(audio_ctx->sample_fmt), audio_ctx->sample_rate, audio_ctx->channels, av_get_sample_fmt_name(AV_SAMPLE_FMT_S16), audio_decoder->HwSampleRate, audio_decoder->HwChannels); return 0; } /** ** Audio pass-through decoder helper. 
**
**	@param audio_decoder	audio decoder data
**	@param avpkt		undecoded audio packet (original bitstream bytes)
**
**	@retval 1	packet was consumed (burst queued or being accumulated)
**	@retval -1	packet too large for the IEC 61937 burst buffer
**	@retval 0	no pass-through configured for this codec
*/
static int CodecAudioPassthroughHelper(AudioDecoder * audio_decoder,
    const AVPacket * avpkt)
{
#ifdef USE_PASSTHROUGH
    const AVCodecContext *audio_ctx;

    audio_ctx = audio_decoder->AudioCtx;
    // SPDIF/HDMI passthrough
    if (CodecPassthrough & CodecAC3 && audio_ctx->codec_id == AV_CODEC_ID_AC3) {
	uint16_t *spdif;
	int spdif_sz;

	spdif = audio_decoder->Spdif;
	// 6144 bytes = one IEC 61937 AC-3 burst
	spdif_sz = 6144;

#ifdef USE_AC3_DRIFT_CORRECTION
	// FIXME: this works with some TVs/AVReceivers
	// FIXME: write burst size drift correction, which should work with all
	if (CodecAudioDrift & CORRECT_AC3) {
	    int x;

	    // integer drift correction with fractional carry in DriftFrac
	    x = (audio_decoder->DriftFrac +
		(audio_decoder->DriftCorr * spdif_sz)) / (10 *
		audio_decoder->HwSampleRate * 100);
	    audio_decoder->DriftFrac =
		(audio_decoder->DriftFrac +
		(audio_decoder->DriftCorr * spdif_sz)) % (10 *
		audio_decoder->HwSampleRate * 100);
	    // round to word border
	    x *= audio_decoder->HwChannels * 4;
	    if (x < -64) {		// limit correction
		x = -64;
	    } else if (x > 64) {
		x = 64;
	    }
	    spdif_sz += x;
	}
#endif

	// build SPDIF header and append A52 audio to it
	// avpkt is the original data
	if (spdif_sz < avpkt->size + 8) {
	    // NOTE(review): message is misleading — this fires when the
	    // encoded packet is too large for the burst buffer
	    Error(_("codec/audio: decoded data smaller than encoded\n"));
	    return -1;
	}
	spdif[0] = htole16(0xF872);	// iec 61937 sync word
	spdif[1] = htole16(0x4E1F);
	// burst-info: data type AC-3, bsmod from byte 5 of the frame
	spdif[2] = htole16(IEC61937_AC3 | (avpkt->data[5] & 0x07) << 8);
	spdif[3] = htole16(avpkt->size * 8);	// payload length in bits
	// copy original data for output
	// FIXME: not 100% sure, if endian is correct on not intel hardware
	swab(avpkt->data, spdif + 4, avpkt->size);
	// FIXME: don't need to clear always
	memset(spdif + 4 + avpkt->size / 2, 0, spdif_sz - 8 - avpkt->size);
	// don't play with the ac-3 samples
	AudioEnqueue(spdif, spdif_sz);
	return 1;
    }
    if (CodecPassthrough & CodecEAC3
	&& audio_ctx->codec_id == AV_CODEC_ID_EAC3) {
	uint16_t *spdif;
	int spdif_sz;
	int repeat;

	// build SPDIF header and append A52 audio to it
	// avpkt is the original data
	spdif = audio_decoder->Spdif;
	// E-AC-3 over HDMI uses a HBR burst of 4 * 6144 bytes
	spdif_sz = 24576;		// 4 * 6144
	if (audio_decoder->HwSampleRate == 48000) {
	    spdif_sz = 6144;		// none-HBR fallback
	}
	if (spdif_sz < audio_decoder->SpdifIndex + avpkt->size + 8) {
	    // NOTE(review): message is misleading — burst buffer overflow
	    Error(_("codec/audio: decoded data smaller than encoded\n"));
	    return -1;
	}
	// check if we must pack multiple packets
	repeat = 1;
	if ((avpkt->data[4] & 0xc0) != 0xc0) {	// fscod
	    static const uint8_t eac3_repeat[4] = { 6, 3, 2, 1 };

	    // fscod2
	    repeat = eac3_repeat[(avpkt->data[4] & 0x30) >> 4];
	}
	// fprintf(stderr, "repeat %d %d\n", repeat, avpkt->size);

	// copy original data for output
	// pack upto repeat EAC-3 pakets into one IEC 61937 burst
	// FIXME: not 100% sure, if endian is correct on not intel hardware
	swab(avpkt->data, spdif + 4 + audio_decoder->SpdifIndex, avpkt->size);
	audio_decoder->SpdifIndex += avpkt->size;
	// accumulate until 'repeat' frames are collected
	if (++audio_decoder->SpdifCount < repeat) {
	    return 1;
	}

	spdif[0] = htole16(0xF872);	// iec 61937 sync word
	spdif[1] = htole16(0x4E1F);
	spdif[2] = htole16(IEC61937_EAC3);
	spdif[3] = htole16(audio_decoder->SpdifIndex * 8);
	memset(spdif + 4 + audio_decoder->SpdifIndex / 2, 0,
	    spdif_sz - 8 - audio_decoder->SpdifIndex);

	// don't play with the eac-3 samples
	AudioEnqueue(spdif, spdif_sz);

	audio_decoder->SpdifIndex = 0;
	audio_decoder->SpdifCount = 0;
	return 1;
    }
#endif
    return 0;
}

#if !defined(USE_SWRESAMPLE) && !defined(USE_AVRESAMPLE)

/**
**	Set/update audio pts clock.
** ** @param audio_decoder audio decoder data ** @param pts presentation timestamp */ static void CodecAudioSetClock(AudioDecoder * audio_decoder, int64_t pts) { struct timespec nowtime; int64_t delay; int64_t tim_diff; int64_t pts_diff; int drift; int corr; AudioSetClock(pts); delay = AudioGetDelay(); if (!delay) { return; } clock_gettime(CLOCK_MONOTONIC, &nowtime); if (!audio_decoder->LastDelay) { audio_decoder->LastTime = nowtime; audio_decoder->LastPTS = pts; audio_decoder->LastDelay = delay; audio_decoder->Drift = 0; audio_decoder->DriftFrac = 0; Debug(3, "codec/audio: inital drift delay %" PRId64 "ms\n", delay / 90); return; } // collect over some time pts_diff = pts - audio_decoder->LastPTS; if (pts_diff < 10 * 1000 * 90) { return; } tim_diff = (nowtime.tv_sec - audio_decoder->LastTime.tv_sec) * 1000 * 1000 * 1000 + (nowtime.tv_nsec - audio_decoder->LastTime.tv_nsec); drift = (tim_diff * 90) / (1000 * 1000) - pts_diff + delay - audio_decoder->LastDelay; // adjust rounding error nowtime.tv_nsec -= nowtime.tv_nsec % (1000 * 1000 / 90); audio_decoder->LastTime = nowtime; audio_decoder->LastPTS = pts; audio_decoder->LastDelay = delay; if (0) { Debug(3, "codec/audio: interval P:%5" PRId64 "ms T:%5" PRId64 "ms D:%4" PRId64 "ms %f %d\n", pts_diff / 90, tim_diff / (1000 * 1000), delay / 90, drift / 90.0, audio_decoder->DriftCorr); } // underruns and av_resample have the same time :((( if (abs(drift) > 10 * 90) { // drift too big, pts changed? 
Debug(3, "codec/audio: drift(%6d) %3dms reset\n", audio_decoder->DriftCorr, drift / 90); audio_decoder->LastDelay = 0; #ifdef DEBUG corr = 0; // keep gcc happy #endif } else { drift += audio_decoder->Drift; audio_decoder->Drift = drift; corr = (10 * audio_decoder->HwSampleRate * drift) / (90 * 1000); // SPDIF/HDMI passthrough if ((CodecAudioDrift & CORRECT_AC3) && (!(CodecPassthrough & CodecAC3) || audio_decoder->AudioCtx->codec_id != AV_CODEC_ID_AC3) && (!(CodecPassthrough & CodecEAC3) || audio_decoder->AudioCtx->codec_id != AV_CODEC_ID_EAC3)) { audio_decoder->DriftCorr = -corr; } if (audio_decoder->DriftCorr < -20000) { // limit correction audio_decoder->DriftCorr = -20000; } else if (audio_decoder->DriftCorr > 20000) { audio_decoder->DriftCorr = 20000; } } // FIXME: this works with libav 0.8, and only with >10ms with ffmpeg 0.10 if (audio_decoder->AvResample && audio_decoder->DriftCorr) { int distance; // try workaround for buggy ffmpeg 0.10 if (abs(audio_decoder->DriftCorr) < 2000) { distance = (pts_diff * audio_decoder->HwSampleRate) / (900 * 1000); } else { distance = (pts_diff * audio_decoder->HwSampleRate) / (90 * 1000); } av_resample_compensate(audio_decoder->AvResample, audio_decoder->DriftCorr / 10, distance); } if (1) { static int c; if (!(c++ % 10)) { Debug(3, "codec/audio: drift(%6d) %8dus %5d\n", audio_decoder->DriftCorr, drift * 1000 / 90, corr); } } } /** ** Handle audio format changes. 
**
**	@param audio_decoder	audio decoder data
**
**	@note this is the old not good supported version
*/
static void CodecAudioUpdateFormat(AudioDecoder * audio_decoder)
{
    int passthrough;
    const AVCodecContext *audio_ctx;
    int err;

    // tear down any previous resample contexts before reconfiguring
    if (audio_decoder->ReSample) {
	audio_resample_close(audio_decoder->ReSample);
	audio_decoder->ReSample = NULL;
    }
    if (audio_decoder->AvResample) {
	av_resample_close(audio_decoder->AvResample);
	audio_decoder->AvResample = NULL;
	audio_decoder->RemainCount = 0;
    }

    audio_ctx = audio_decoder->AudioCtx;
    if ((err = CodecAudioUpdateHelper(audio_decoder, &passthrough))) {
	Debug(3, "codec/audio: resample %dHz *%d -> %dHz *%d\n",
	    audio_ctx->sample_rate, audio_ctx->channels,
	    audio_decoder->HwSampleRate, audio_decoder->HwChannels);

	if (err == 1) {
	    // channels/rate unsupported by hardware: software resample
	    audio_decoder->ReSample =
		av_audio_resample_init(audio_decoder->HwChannels,
		audio_ctx->channels, audio_decoder->HwSampleRate,
		audio_ctx->sample_rate, audio_ctx->sample_fmt,
		audio_ctx->sample_fmt, 16, 10, 0, 0.8);
	    // libav-0.8_pre didn't support 6 -> 2 channels
	    if (!audio_decoder->ReSample) {
		Error(_("codec/audio: resample setup error\n"));
		audio_decoder->HwChannels = 0;
		audio_decoder->HwSampleRate = 0;
	    }
	    return;
	}
	Debug(3, "codec/audio: audio setup error\n");
	// FIXME: handle errors
	audio_decoder->HwChannels = 0;
	audio_decoder->HwSampleRate = 0;
	return;
    }
    if (passthrough) {			// pass-through no conversion allowed
	return;
    }
    // prepare audio drift resample
#ifdef USE_AUDIO_DRIFT_CORRECTION
    if (CodecAudioDrift & CORRECT_PCM) {
	if (audio_decoder->AvResample) {
	    Error(_("codec/audio: overwrite resample\n"));
	}
	// 1:1 rate resampler used only as drift-compensation vehicle
	audio_decoder->AvResample =
	    av_resample_init(audio_decoder->HwSampleRate,
	    audio_decoder->HwSampleRate, 16, 10, 0, 0.8);
	if (!audio_decoder->AvResample) {
	    Error(_("codec/audio: AvResample setup error\n"));
	} else {
	    // reset drift to some default value
	    audio_decoder->DriftCorr /= 2;
	    audio_decoder->DriftFrac = 0;
	    av_resample_compensate(audio_decoder->AvResample,
		audio_decoder->DriftCorr / 10,
		10 * audio_decoder->HwSampleRate);
	}
    }
#endif
}

/**
**	Codec enqueue audio samples.
**
**	@param audio_decoder	audio decoder data
**	@param data	samples data (interleaved S16)
**	@param count	number of bytes in sample data
*/
void CodecAudioEnqueue(AudioDecoder * audio_decoder, int16_t * data, int count)
{
#ifdef USE_AUDIO_DRIFT_CORRECTION
    if ((CodecAudioDrift & CORRECT_PCM) && audio_decoder->AvResample) {
	int16_t buf[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 4 +
	    FF_INPUT_BUFFER_PADDING_SIZE] __attribute__ ((aligned(16)));
	int16_t buftmp[MAX_CHANNELS][(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 4];
	int consumed;
	int i;
	int n;
	int ch;
	int bytes_n;

	// per-channel byte count of the incoming interleaved data
	bytes_n = count / audio_decoder->HwChannels;
	// resize sample buffer, if needed
	if (audio_decoder->RemainCount + bytes_n > audio_decoder->BufferSize) {
	    audio_decoder->BufferSize = audio_decoder->RemainCount + bytes_n;
	    for (ch = 0; ch < MAX_CHANNELS; ++ch) {
		audio_decoder->Buffer[ch] =
		    realloc(audio_decoder->Buffer[ch],
		    audio_decoder->BufferSize);
	    }
	}
	// copy remaining bytes into sample buffer
	for (ch = 0; ch < audio_decoder->HwChannels; ++ch) {
	    memcpy(audio_decoder->Buffer[ch], audio_decoder->Remain[ch],
		audio_decoder->RemainCount);
	}
	// deinterleave samples into sample buffer
	for (i = 0; i < bytes_n / 2; i++) {
	    for (ch = 0; ch < audio_decoder->HwChannels; ++ch) {
		audio_decoder->Buffer[ch][audio_decoder->RemainCount / 2 + i]
		    = data[i * audio_decoder->HwChannels + ch];
	    }
	}

	// NOTE(review): RemainSize added here although RemainCount bytes
	// were copied above — verify against upstream; looks intentional
	// since RemainCount is set to RemainSize in the loop below
	bytes_n += audio_decoder->RemainSize;
	n = 0;				// keep gcc lucky
	// resample the sample buffer into tmp buffer
	for (ch = 0; ch < audio_decoder->HwChannels; ++ch) {
	    n = av_resample(audio_decoder->AvResample, buftmp[ch],
		audio_decoder->Buffer[ch], &consumed, bytes_n / 2,
		sizeof(buftmp[ch]) / 2, ch == audio_decoder->HwChannels - 1);
	    // fixme remaining channels
	    if (bytes_n - consumed * 2 > audio_decoder->RemainSize) {
		audio_decoder->RemainSize = bytes_n - consumed * 2;
	    }
	    audio_decoder->Remain[ch] =
		realloc(audio_decoder->Remain[ch], audio_decoder->RemainSize);
	    memcpy(audio_decoder->Remain[ch],
		audio_decoder->Buffer[ch] + consumed,
		audio_decoder->RemainSize);
	    audio_decoder->RemainCount = audio_decoder->RemainSize;
	}
	// interleave samples from sample buffer
	for (i = 0; i < n; i++) {
	    for (ch = 0; ch < audio_decoder->HwChannels; ++ch) {
		buf[i * audio_decoder->HwChannels + ch] = buftmp[ch][i];
	    }
	}
	n *= 2;				// samples -> bytes

	n *= audio_decoder->HwChannels;
	if (!(audio_decoder->Passthrough & CodecPCM)) {
	    CodecReorderAudioFrame(buf, n, audio_decoder->HwChannels);
	}
	AudioEnqueue(buf, n);
	return;
    }
#endif
    if (!(audio_decoder->Passthrough & CodecPCM)) {
	CodecReorderAudioFrame(data, count, audio_decoder->HwChannels);
    }
    AudioEnqueue(data, count);
}

/**
**	Decode an audio packet.
**
**	PTS must be handled self.
**
**	@param audio_decoder	audio decoder data
**	@param avpkt	audio packet
*/
void CodecAudioDecode(AudioDecoder * audio_decoder, const AVPacket * avpkt)
{
    int16_t buf[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 4 +
	FF_INPUT_BUFFER_PADDING_SIZE] __attribute__ ((aligned(16)));
    int buf_sz;
    int l;
    AVCodecContext *audio_ctx;

    audio_ctx = audio_decoder->AudioCtx;

    // FIXME: don't need to decode pass-through codecs
    buf_sz = sizeof(buf);
    l = avcodec_decode_audio3(audio_ctx, buf, &buf_sz, (AVPacket *) avpkt);
    if (avpkt->size != l) {
	if (l == AVERROR(EAGAIN)) {
	    Error(_("codec: latm\n"));
	    return;
	}
	if (l < 0) {			// no audio frame could be decompressed
	    Error(_("codec: error audio data\n"));
	    return;
	}
	Error(_("codec: error more than one frame data\n"));
    }
    // update audio clock
    if (avpkt->pts != (int64_t) AV_NOPTS_VALUE) {
	CodecAudioSetClock(audio_decoder, avpkt->pts);
    }
    // FIXME: must first play remainings bytes, than change and play new.
    if (audio_decoder->Passthrough != CodecPassthrough
	|| audio_decoder->SampleRate != audio_ctx->sample_rate
	|| audio_decoder->Channels != audio_ctx->channels) {
	CodecAudioUpdateFormat(audio_decoder);
    }

    if (audio_decoder->HwSampleRate && audio_decoder->HwChannels) {
	// need to resample audio
	if (audio_decoder->ReSample) {
	    int16_t outbuf[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 4 +
		FF_INPUT_BUFFER_PADDING_SIZE] __attribute__ ((aligned(16)));
	    int outlen;

	    // FIXME: libav-0.7.2 crash here
	    outlen = audio_resample(audio_decoder->ReSample, outbuf, buf,
		buf_sz);
#ifdef DEBUG
	    if (outlen != buf_sz) {
		Debug(3, "codec/audio: possible fixed ffmpeg\n");
	    }
#endif
	    if (outlen) {
		// outlen seems to be wrong in ffmpeg-0.9
		outlen /= audio_decoder->Channels *
		    av_get_bytes_per_sample(audio_ctx->sample_fmt);
		outlen *= audio_decoder->HwChannels *
		    av_get_bytes_per_sample(audio_ctx->sample_fmt);
		Debug(4, "codec/audio: %d -> %d\n", buf_sz, outlen);
		CodecAudioEnqueue(audio_decoder, outbuf, outlen);
	    }
	} else {
	    if (CodecAudioPassthroughHelper(audio_decoder, avpkt)) {
		return;
	    }
#if 0
	    //
	    //	old experimental code
	    //
	    if (1) {
		// FIXME: need to detect dts
		// copy original data for output
		// FIXME: buf is sint
		buf[0] = 0x72;
		buf[1] = 0xF8;
		buf[2] = 0x1F;
		buf[3] = 0x4E;
		buf[4] = 0x00;
		switch (avpkt->size) {
		    case 512:
			buf[5] = 0x0B;
			break;
		    case 1024:
			buf[5] = 0x0C;
			break;
		    case 2048:
			buf[5] = 0x0D;
			break;
		    default:
			Debug(3,
			    "codec/audio: dts sample burst not supported\n");
			buf[5] = 0x00;
			break;
		}
		buf[6] = (avpkt->size * 8);
		buf[7] = (avpkt->size * 8) >> 8;
		//buf[8] = 0x0B;
		//buf[9] = 0x77;
		//printf("%x %x\n", avpkt->data[0],avpkt->data[1]);
		// swab?
		memcpy(buf + 8, avpkt->data, avpkt->size);
		memset(buf + 8 + avpkt->size, 0, buf_sz - 8 - avpkt->size);
	    } else if (1) {
		// FIXME: need to detect mp2
		// FIXME: mp2 passthrough
		// see softhddev.c version/layer
		// 0x04 mpeg1 layer1
		// 0x05 mpeg1 layer23
		// 0x06 mpeg2 ext
		// 0x07 mpeg2.5 layer 1
		// 0x08 mpeg2.5 layer 2
		// 0x09 mpeg2.5 layer 3
	    }
	    // DTS HD?
	    // True HD?
#endif CodecAudioEnqueue(audio_decoder, buf, buf_sz); } } } #endif #if defined(USE_SWRESAMPLE) || defined(USE_AVRESAMPLE) /** ** Set/update audio pts clock. ** ** @param audio_decoder audio decoder data ** @param pts presentation timestamp */ static void CodecAudioSetClock(AudioDecoder * audio_decoder, int64_t pts) { #ifdef USE_AUDIO_DRIFT_CORRECTION struct timespec nowtime; int64_t delay; int64_t tim_diff; int64_t pts_diff; int drift; int corr; AudioSetClock(pts); delay = AudioGetDelay(); if (!delay) { return; } clock_gettime(CLOCK_MONOTONIC, &nowtime); if (!audio_decoder->LastDelay) { audio_decoder->LastTime = nowtime; audio_decoder->LastPTS = pts; audio_decoder->LastDelay = delay; audio_decoder->Drift = 0; audio_decoder->DriftFrac = 0; Debug(3, "codec/audio: inital drift delay %" PRId64 "ms\n", delay / 90); return; } // collect over some time pts_diff = pts - audio_decoder->LastPTS; if (pts_diff < 10 * 1000 * 90) { return; } tim_diff = (nowtime.tv_sec - audio_decoder->LastTime.tv_sec) * 1000 * 1000 * 1000 + (nowtime.tv_nsec - audio_decoder->LastTime.tv_nsec); drift = (tim_diff * 90) / (1000 * 1000) - pts_diff + delay - audio_decoder->LastDelay; // adjust rounding error nowtime.tv_nsec -= nowtime.tv_nsec % (1000 * 1000 / 90); audio_decoder->LastTime = nowtime; audio_decoder->LastPTS = pts; audio_decoder->LastDelay = delay; if (0) { Debug(3, "codec/audio: interval P:%5" PRId64 "ms T:%5" PRId64 "ms D:%4" PRId64 "ms %f %d\n", pts_diff / 90, tim_diff / (1000 * 1000), delay / 90, drift / 90.0, audio_decoder->DriftCorr); } // underruns and av_resample have the same time :((( if (abs(drift) > 10 * 90) { // drift too big, pts changed? 
Debug(3, "codec/audio: drift(%6d) %3dms reset\n", audio_decoder->DriftCorr, drift / 90); audio_decoder->LastDelay = 0; #ifdef DEBUG corr = 0; // keep gcc happy #endif } else { drift += audio_decoder->Drift; audio_decoder->Drift = drift; corr = (10 * audio_decoder->HwSampleRate * drift) / (90 * 1000); // SPDIF/HDMI passthrough if ((CodecAudioDrift & CORRECT_AC3) && (!(CodecPassthrough & CodecAC3) || audio_decoder->AudioCtx->codec_id != AV_CODEC_ID_AC3) && (!(CodecPassthrough & CodecEAC3) || audio_decoder->AudioCtx->codec_id != AV_CODEC_ID_EAC3)) { audio_decoder->DriftCorr = -corr; } if (audio_decoder->DriftCorr < -20000) { // limit correction audio_decoder->DriftCorr = -20000; } else if (audio_decoder->DriftCorr > 20000) { audio_decoder->DriftCorr = 20000; } } #ifdef USE_SWRESAMPLE if (audio_decoder->Resample && audio_decoder->DriftCorr) { int distance; // try workaround for buggy ffmpeg 0.10 if (abs(audio_decoder->DriftCorr) < 2000) { distance = (pts_diff * audio_decoder->HwSampleRate) / (900 * 1000); } else { distance = (pts_diff * audio_decoder->HwSampleRate) / (90 * 1000); } if (swr_set_compensation(audio_decoder->Resample, audio_decoder->DriftCorr / 10, distance)) { Debug(3, "codec/audio: swr_set_compensation failed\n"); } } #endif #ifdef USE_AVRESAMPLE if (audio_decoder->Resample && audio_decoder->DriftCorr) { int distance; distance = (pts_diff * audio_decoder->HwSampleRate) / (900 * 1000); if (avresample_set_compensation(audio_decoder->Resample, audio_decoder->DriftCorr / 10, distance)) { Debug(3, "codec/audio: swr_set_compensation failed\n"); } } #endif if (1) { static int c; if (!(c++ % 10)) { Debug(3, "codec/audio: drift(%6d) %8dus %5d\n", audio_decoder->DriftCorr, drift * 1000 / 90, corr); } } #else AudioSetClock(pts); #endif } /** ** Handle audio format changes. 
**
**	@param audio_decoder	audio decoder data
*/
static void CodecAudioUpdateFormat(AudioDecoder * audio_decoder)
{
    int passthrough;
    const AVCodecContext *audio_ctx;

    if (CodecAudioUpdateHelper(audio_decoder, &passthrough)) {
	// FIXME: handle swresample format conversions.
	return;
    }
    if (passthrough) {			// pass-through no conversion allowed
	return;
    }

    audio_ctx = audio_decoder->AudioCtx;

#ifdef DEBUG
    if (audio_ctx->sample_fmt == AV_SAMPLE_FMT_S16
	&& audio_ctx->sample_rate == audio_decoder->HwSampleRate
	&& !CodecAudioDrift) {
	// FIXME: use Resample only, when it is needed!
	fprintf(stderr, "no resample needed\n");
    }
#endif

#ifdef USE_SWRESAMPLE
    // convert decoder output to S16 at the hardware sample rate;
    // channel layout is kept unchanged (no downmix here)
    audio_decoder->Resample =
	swr_alloc_set_opts(audio_decoder->Resample,
	audio_ctx->channel_layout, AV_SAMPLE_FMT_S16,
	audio_decoder->HwSampleRate, audio_ctx->channel_layout,
	audio_ctx->sample_fmt, audio_ctx->sample_rate, 0, NULL);
    if (audio_decoder->Resample) {
	swr_init(audio_decoder->Resample);
    } else {
	Error(_("codec/audio: can't setup resample\n"));
    }
#endif

#ifdef USE_AVRESAMPLE
    if (!(audio_decoder->Resample = avresample_alloc_context())) {
	Error(_("codec/audio: can't setup resample\n"));
	return;
    }

    av_opt_set_int(audio_decoder->Resample, "in_channel_layout",
	audio_ctx->channel_layout, 0);
    av_opt_set_int(audio_decoder->Resample, "in_sample_fmt",
	audio_ctx->sample_fmt, 0);
    av_opt_set_int(audio_decoder->Resample, "in_sample_rate",
	audio_ctx->sample_rate, 0);
    av_opt_set_int(audio_decoder->Resample, "out_channel_layout",
	audio_ctx->channel_layout, 0);
    av_opt_set_int(audio_decoder->Resample, "out_sample_fmt",
	AV_SAMPLE_FMT_S16, 0);
    av_opt_set_int(audio_decoder->Resample, "out_sample_rate",
	audio_decoder->HwSampleRate, 0);

    if (avresample_open(audio_decoder->Resample)) {
	avresample_free(&audio_decoder->Resample);
	audio_decoder->Resample = NULL;
	Error(_("codec/audio: can't open resample\n"));
	return;
    }
#endif
}

/**
**	Decode an audio packet.
**
**	PTS must be handled self.
**
**	@note the caller has not aligned avpkt and not cleared the end.
**
**	@param audio_decoder	audio decoder data
**	@param avpkt	audio packet
*/
void CodecAudioDecode(AudioDecoder * audio_decoder, const AVPacket * avpkt)
{
    AVCodecContext *audio_ctx;
#if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(56,28,1)
    AVFrame frame[1];
#else
    AVFrame *frame;
#endif
    int got_frame;
    int n;

    audio_ctx = audio_decoder->AudioCtx;

    // FIXME: don't need to decode pass-through codecs

    // new AVFrame API
#if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(56,28,1)
    avcodec_get_frame_defaults(frame);
#else
    frame = audio_decoder->Frame;
    av_frame_unref(frame);
#endif
    got_frame = 0;
    n = avcodec_decode_audio4(audio_ctx, frame, &got_frame,
	(AVPacket *) avpkt);
    if (n != avpkt->size) {
	if (n == AVERROR(EAGAIN)) {
	    Error(_("codec/audio: latm\n"));
	    return;
	}
	if (n < 0) {			// no audio frame could be decompressed
	    Error(_("codec/audio: bad audio frame\n"));
	    return;
	}
	Error(_("codec/audio: error more than one frame data\n"));
    }
    if (!got_frame) {
	Error(_("codec/audio: no frame\n"));
	return;
    }
    // update audio clock
    if (avpkt->pts != (int64_t) AV_NOPTS_VALUE) {
	CodecAudioSetClock(audio_decoder, avpkt->pts);
    }
    // format change
    if (audio_decoder->Passthrough != CodecPassthrough
	|| audio_decoder->SampleRate != audio_ctx->sample_rate
	|| audio_decoder->Channels != audio_ctx->channels) {
	CodecAudioUpdateFormat(audio_decoder);
    }

    if (!audio_decoder->HwSampleRate || !audio_decoder->HwChannels) {
	return;				// unsupported sample format
    }

    if (CodecAudioPassthroughHelper(audio_decoder, avpkt)) {
	return;				// undecoded data was queued
    }

    if (0) {
	// debug dump of the decoded frame layout
	char strbuf[32];
	int data_sz;
	int plane_sz;

	data_sz = av_samples_get_buffer_size(&plane_sz, audio_ctx->channels,
	    frame->nb_samples, audio_ctx->sample_fmt, 1);
	fprintf(stderr, "codec/audio: sample_fmt %s\n",
	    av_get_sample_fmt_name(audio_ctx->sample_fmt));
	av_get_channel_layout_string(strbuf, 32, audio_ctx->channels,
	    audio_ctx->channel_layout);
	fprintf(stderr, "codec/audio: layout %s\n", strbuf);
	fprintf(stderr,
	    "codec/audio: channels %d samples %d plane %d data %d\n",
	    audio_ctx->channels, frame->nb_samples, plane_sz, data_sz);
    }
#ifdef USE_SWRESAMPLE
    if (audio_decoder->Resample) {
	uint8_t outbuf[8192 * 2 * 8];
	uint8_t *out[1];

	out[0] = outbuf;
	n = swr_convert(audio_decoder->Resample, out,
	    sizeof(outbuf) / (2 * audio_decoder->HwChannels),
	    (const uint8_t **)frame->extended_data, frame->nb_samples);
	if (n > 0) {
	    if (!(audio_decoder->Passthrough & CodecPCM)) {
		CodecReorderAudioFrame((int16_t *) outbuf,
		    n * 2 * audio_decoder->HwChannels,
		    audio_decoder->HwChannels);
	    }
	    AudioEnqueue(outbuf, n * 2 * audio_decoder->HwChannels);
	}
	return;
    }
#endif
#ifdef USE_AVRESAMPLE
    if (audio_decoder->Resample) {
	uint8_t outbuf[8192 * 2 * 8];
	uint8_t *out[1];

	out[0] = outbuf;
	n = avresample_convert(audio_decoder->Resample, out, 0,
	    sizeof(outbuf) / (2 * audio_decoder->HwChannels),
	    (uint8_t **) frame->extended_data, 0, frame->nb_samples);
	// FIXME: set out_linesize, in_linesize correct
	if (n > 0) {
	    if (!(audio_decoder->Passthrough & CodecPCM)) {
		CodecReorderAudioFrame((int16_t *) outbuf,
		    n * 2 * audio_decoder->HwChannels,
		    audio_decoder->HwChannels);
	    }
	    AudioEnqueue(outbuf, n * 2 * audio_decoder->HwChannels);
	}
	return;
    }
#endif
#ifdef DEBUG
    // should be never reached
    fprintf(stderr, "oops\n");
#endif
}

#endif

/**
**	Flush the audio decoder.
**
**	@param decoder	audio decoder data
*/
void CodecAudioFlushBuffers(AudioDecoder * decoder)
{
    avcodec_flush_buffers(decoder->AudioCtx);
}

//----------------------------------------------------------------------------
//	Codec
//----------------------------------------------------------------------------

/**
**	Empty log callback
*/
static void CodecNoopCallback( __attribute__ ((unused))
    void *ptr, __attribute__ ((unused))
    int level, __attribute__ ((unused))
    const char *fmt, __attribute__ ((unused)) va_list vl)
{
}

/**
**	Codec init
*/
void CodecInit(void)
{
    pthread_mutex_init(&CodecLockMutex, NULL);
#ifndef DEBUG
    // disable display ffmpeg error messages
    av_log_set_callback(CodecNoopCallback);
#else
    (void)CodecNoopCallback;
#endif
    avcodec_register_all();		// register all formats and codecs
}

/**
**	Codec exit.
*/
void CodecExit(void)
{
    pthread_mutex_destroy(&CodecLockMutex);
}
vdr-plugin-softhddevice/codec.h0000644000175000017500000000651312644034136016374 0ustar  tobiastobias///
///	@file codec.h	@brief Codec module headerfile
///
///	Copyright (c) 2009 - 2013, 2015 by Johns.  All Rights Reserved.
///
///	Contributor(s):
///
///	License: AGPLv3
///
///	This program is free software: you can redistribute it and/or modify
///	it under the terms of the GNU Affero General Public License as
///	published by the Free Software Foundation, either version 3 of the
///	License.
///
///	This program is distributed in the hope that it will be useful,
///	but WITHOUT ANY WARRANTY; without even the implied warranty of
///	MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
///	GNU Affero General Public License for more details.
///
///	$Id: bdb4d18dbe371e497d039e45faa7c134b019860a $
//////////////////////////////////////////////////////////////////////////////

/// @addtogroup Codec
/// @{

//----------------------------------------------------------------------------
//	Defines
//----------------------------------------------------------------------------

#define CodecPCM 0x01			///< PCM bit mask
#define CodecMPA 0x02			///< MPA bit mask (planned)
#define CodecAC3 0x04			///< AC-3 bit mask
#define CodecEAC3 0x08			///< E-AC-3 bit mask
#define CodecDTS 0x10			///< DTS bit mask (planned)

//----------------------------------------------------------------------------
//	Typedefs
//----------------------------------------------------------------------------

    /// Video decoder typedef.
typedef struct _video_decoder_ VideoDecoder;

    /// Audio decoder typedef.
typedef struct _audio_decoder_ AudioDecoder;

//----------------------------------------------------------------------------
//	Variables
//----------------------------------------------------------------------------

    /// Flag prefer fast channel switch
extern char CodecUsePossibleDefectFrames;

//----------------------------------------------------------------------------
//	Prototypes
//----------------------------------------------------------------------------

    /// Allocate a new video decoder context.
extern VideoDecoder *CodecVideoNewDecoder(VideoHwDecoder *);

    /// Deallocate a video decoder context.
extern void CodecVideoDelDecoder(VideoDecoder *);

    /// Open video codec.
extern void CodecVideoOpen(VideoDecoder *, int);

    /// Close video codec.
extern void CodecVideoClose(VideoDecoder *);

    /// Decode a video packet.
extern void CodecVideoDecode(VideoDecoder *, const AVPacket *);

    /// Flush video buffers.
extern void CodecVideoFlushBuffers(VideoDecoder *);

    /// Allocate a new audio decoder context.
extern AudioDecoder *CodecAudioNewDecoder(void);

    /// Deallocate an audio decoder context.
extern void CodecAudioDelDecoder(AudioDecoder *);

    /// Open audio codec.
extern void CodecAudioOpen(AudioDecoder *, int);

    /// Close audio codec.
extern void CodecAudioClose(AudioDecoder *);

    /// Set audio drift correction.
extern void CodecSetAudioDrift(int);

    /// Set audio pass-through.
extern void CodecSetAudioPassthrough(int);

    /// Set audio downmix.
extern void CodecSetAudioDownmix(int);

    /// Decode an audio packet.
extern void CodecAudioDecode(AudioDecoder *, const AVPacket *);

    /// Flush audio buffers.
extern void CodecAudioFlushBuffers(AudioDecoder *);

    /// Setup and initialize codec module.
extern void CodecInit(void);

    /// Cleanup and exit codec module.
extern void CodecExit(void);

/// @}
vdr-plugin-softhddevice/README.txt0000644000175000017500000002553412644034136016650 0ustar  tobiastobias@file README.txt		@brief A software HD output device for VDR

Copyright (c) 2011 - 2013 by Johns.  All Rights Reserved.

Contributor(s):

License: AGPLv3

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU Affero General Public License for more details.

$Id: 5267da021a68b4a727b479417334bfbe67bbba14 $

A software and GPU emulated HD output device plugin for VDR.
o Video decoder CPU / VDPAU o Video output VDPAU o Audio FFMpeg / Alsa / Analog o Audio FFMpeg / Alsa / Digital o Audio FFMpeg / OSS / Analog o HDMI/SPDIF pass-through o Software volume, compression, normalize and channel resample o VDR ScaleVideo API o Software deinterlacer Bob (VA-API only) o Autocrop o Grab image (VDPAU only) o Suspend / Dettach o Letterbox, Stretch and Center cut-out video display modes o atmo light support with plugin http://github.com/durchflieger/DFAtmo o PIP (Picture-in-Picture) (VDPAU only) o planned: Remove VA-API decoder and output support o planned: Video decoder OpenMax o planned: Video output Opengl / Xv o planned: Improved software deinterlacer (yadif or/and ffmpeg filters) o XvBa support is no longer planned (use future Radeon UVD VDPAU) To compile you must have the 'requires' installed. Good luck johns Quickstart: ----------- Just type make and use. Install: -------- 1a) git git clone git://projects.vdr-developer.org/vdr-plugin-softhddevice.git cd vdr-plugin-softhddevice make make install 2a) tarball Download latest version from: http://projects.vdr-developer.org/projects/plg-softhddevice/files tar vxf vdr-softhddevice-*.tar.bz2 cd softhddevice-* make make install You can edit Makefile to enable/disable VDPAU / VA-API / Alsa / OSS support. The default is to autodetect as much as possible. Setup: environment ------ Following is supported: DISPLAY=:0.0 x11 display name NO_HW=1 if set don't use the hardware decoders NO_MPEG_HW=1 if set don't use the hardware decoder for mpeg1/2 only if alsa is configured ALSA_DEVICE=default alsa PCM device name ALSA_PASSTHROUGH_DEVICE= alsa pass-though (AC-3,E-AC-3,DTS,...) device name ALSA_MIXER=default alsa control device name ALSA_MIXER_CHANNEL=PCM alsa control channel name only if oss is configured OSS_AUDIODEV=/dev/dsp oss dsp device name OSS_PASSTHROUGHDEV= oss pass-though (AC-3,E-AC-3,DTS,...) 
device name OSS_MIXERDEV=/dev/mixer oss mixer device name OSS_MIXER_CHANNEL=pcm oss mixer channel name Setup: /etc/vdr/setup.conf ------ Following is supported: softhddevice.MakePrimary = 0 0 = no change, 1 make softhddevice primary at start softhddevice.HideMainMenuEntry = 0 0 = show softhddevice main menu entry, 1 = hide entry softhddevice.Osd.Width = 0 0 = auto (=display, unscaled) n = fixed osd size scaled for display softhddevice.Osd.Height = 0 0 = auto (=display, unscaled) n = fixed osd size scaled for display of the next parameters is 576i, 720p, 1080i_fake or 1080i. 1080i_fake is 1280x1080 or 1440x1080 1080i is "real" 1920x1080 softhddevice..Scaling = 0 0 = normal, 1 = fast, 2 = HQ, 3 = anamorphic softhddevice..Deinterlace = 0 0 = bob, 1 = weave, 2 = temporal, 3 = temporal_spatial, 4 = software (only 0, 1, 4 supported with VA-API) softhddevice..SkipChromaDeinterlace = 0 0 = disabled, 1 = enabled (for slower cards, poor qualität) softhddevice..InverseTelecine = 0 0 = disabled, 1 = enabled softhddevice..Denoise = 0 0 .. 1000 noise reduction level (0 off, 1000 max) softhddevice..Sharpness = 0 -1000 .. 1000 noise reduction level (0 off, -1000 max blur, 1000 max sharp) softhddevice..CutTopBottom = 0 Cut 'n' pixels at at top and bottom of the video picture. softhddevice..CutLeftRight = 0 Cut 'n' pixels at at left and right of the video picture. softhddevice.AudioDelay = 0 +n or -n ms delay audio or delay video softhddevice.AudioPassthrough = 0 0 = none, 1 = PCM, 2 = MPA, 4 = AC-3, 8 = EAC-3, -X disable for PCM/AC-3/EAC-3 the pass-through device is used and the audio stream is passed undecoded to the output device. z.b. 12 = AC-3+EAC-3, 13 = PCM+AC-3+EAC-3 note: MPA/DTS/TrueHD/... aren't supported yet negative values disable passthrough softhddevice.AudioDownmix = 0 0 = none, 1 = downmix Use ffmpeg/libav downmix of AC-3/EAC-3 audio to stereo. 
softhddevice.AudioSoftvol = 0 0 = off, use hardware volume control 1 = on, use software volume control softhddevice.AudioNormalize = 0 0 = off, 1 = enable audio normalize softhddevice.AudioMaxNormalize = 0 maximal volume factor/1000 of the normalize filter softhddevice.AudioCompression = 0 0 = off, 1 = enable audio compression softhddevice.AudioMaxCompression = 0 maximal volume factor/1000 of the compression filter softhddevice.AudioStereoDescent = 0 reduce volume level (/1000) for stereo sources softhddevice.AudioBufferTime = 0 0 = default (336 ms) 1 - 1000 = size of the buffer in ms softhddevice.AutoCrop.Interval = 0 0 disables auto-crop n each 'n' frames auto-crop is checked. softhddevice.AutoCrop.Delay = 0 if auto-crop is over 'n' intervals the same, the cropping is used. softhddevice.AutoCrop.Tolerance = 0 if detected crop area is too small, cut max 'n' pixels at top and bottom. softhddevice.Background = 0 32bit RGBA background color (Red * 16777216 + Green * 65536 + Blue * 256 + Alpha) or hex RRGGBBAA grey 127 * 16777216 + 127 * 65536 + 127 * 256 => 2139062016 in the setup menu this is entered as (24bit RGB and 8bit Alpha) (Red * 65536 + Green * 256 + Blue) softhddevice.StudioLevels = 0 0 use PC levels (0-255) with vdpau. 1 use studio levels (16-235) with vdpau. softhddevice.Suspend.Close = 0 1 suspend closes x11 window, connection and audio device. 
(use svdrpsend plug softhddevice RESU to resume, if you have no lirc) softhddevice.Suspend.X11 = 0 1 suspend stops X11 server (not working yet) softhddevice.60HzMode = 0 0 disable 60Hz display mode 1 enable 60Hz display mode softhddevice.SoftStartSync = 0 0 disable soft start of audio/video sync 1 enable soft start of audio/video sync softhddevice.BlackPicture = 0 0 disable black picture during channel switch 1 enable black picture during channel switch softhddevice.ClearOnSwitch = 0 0 keep video und audio buffers during channel switch 1 clear video and audio buffers on channel switch softhddevice.Video4to3DisplayFormat = 1 0 pan and scan 1 letter box 2 center cut-out softhddevice.VideoOtherDisplayFormat = 1 0 pan and scan 1 pillar box 2 center cut-out softhddevice.pip.X = 79 softhddevice.pip.Y = 78 softhddevice.pip.Width = 18 softhddevice.pip.Height = 18 PIP pip window position and size in percent. softhddevice.pip.VideoX = 0 softhddevice.pip.VideoY = 0 softhddevice.pip.VideoWidth = 0 softhddevice.pip.VideoHeight = 0 PIP video window position and size in percent. softhddevice.pip.Alt.X = 0 softhddevice.pip.Alt.Y = 50 softhddevice.pip.Alt.Width = 0 softhddevice.pip.Alt.Height = 50 PIP alternative pip window position and size in percent. softhddevice.pip.Alt.VideoX = 0 softhddevice.pip.Alt.VideoY = 0 softhddevice.pip.Alt.VideoWidth = 0 softhddevice.pip.Alt.VideoHeight = 50 PIP alternative video window position and size in percent. Setup: /etc/vdr/remote.conf ------ Add "XKeySym." definitions to /etc/vdr/remote.conf to control the vdr and plugin with the connected input device. fe. XKeySym.Up Up XKeySym.Down Down ... Additional to the x11 input sends the window close button "Close". fe. XKeySym.Power Close Commandline: ------------ Use vdr -h to see the command line arguments supported by the plugin. -a audio_device Selects audio output module and device. "" to disable audio output /... 
to use oss audio module (if compiled with oss support) other to use alsa audio module (if compiled with alsa support) SVDRP: ------ Use 'svdrpsend.pl plug softhddevice HELP' or 'svdrpsend plug softhddevice HELP' to see the SVDRP commands help and which are supported by the plugin. Keymacros: ---------- See keymacros.conf how to setup the macros. These are the supported key sequences: @softhddevice Blue 1 0 disable pass-through @softhddevice Blue 1 1 enable pass-through @softhddevice Blue 1 2 toggle pass-through @softhddevice Blue 1 3 decrease audio delay by 10ms @softhddevice Blue 1 4 increase audio delay by 10ms @softhddevice Blue 1 5 toggle ac3 mixdown @softhddevice Blue 2 0 disable fullscreen @softhddevice Blue 2 1 enable fullscreen @softhddevice Blue 2 2 toggle fullscreen @softhddevice Blue 2 3 disable auto-crop @softhddevice Blue 2 4 enable auto-crop @softhddevice Blue 2 5 toggle auto-crop @softhddevice Blue 3 0 stretch 4:3 to 16:9 @softhddevice Blue 3 1 letter box 4:3 in 16:9 @softhddevice Blue 3 2 center cut-out 4:3 to 16:9 @softhddevice Blue 3 9 rotate 4:3 to 16:9 zoom mode Running: -------- Click into video window to toggle fullscreen/window mode, only if you have a window manager running. Warning: -------- libav is not supported, expect many bugs with it. Known Bugs: ----------- VA-API doesn't v-sync h264 interlaced streams vdr-image not working Requires: --------- media-video/vdr (version >=1.7.xx) Video Disk Recorder - turns a pc into a powerful set top box for DVB. http://www.tvdr.de/ media-video/ffmpeg (version >=0.7) Complete solution to record, convert and stream audio and video. Includes libavcodec and libswresample. 
http://ffmpeg.org media-libs/alsa-lib Advanced Linux Sound Architecture Library http://www.alsa-project.org or kernel support for oss/oss4 or alsa oss emulation x11-libs/libva (deprecated) Video Acceleration (VA) API for Linux http://www.freedesktop.org/wiki/Software/vaapi x11-libs/libva-intel-driver HW video decode support for Intel integrated graphics http://www.freedesktop.org/wiki/Software/vaapi or x11-libs/vdpau-video VDPAU Backend for Video Acceleration (VA) API http://www.freedesktop.org/wiki/Software/vaapi or x11-libs/xvba-video XVBA Backend for Video Acceleration (VA) API http://www.freedesktop.org/wiki/Software/vaapi x11-libs/libvdpau VDPAU wrapper and trace libraries http://www.freedesktop.org/wiki/Software/VDPAU x11-libs/libxcb, X C-language Bindings library http://xcb.freedesktop.org x11-libs/xcb-util, x11-libs/xcb-util-wm, x11-libs/xcb-util-keysyms X C-language Bindings library http://xcb.freedesktop.org Only versions >= 0.3.8 are good supported x11-libs/libX11 X.Org X11 library http://xorg.freedesktop.org GNU Make 3.xx http://www.gnu.org/software/make/make.html Optional: vdr-plugin-softhddevice/softhddevice.cpp0000644000175000017500000027715512644034136020335 0ustar tobiastobias/// /// @file softhddevice.cpp @brief A software HD device plugin for VDR. /// /// Copyright (c) 2011 - 2015 by Johns. All Rights Reserved. /// /// Contributor(s): /// /// License: AGPLv3 /// /// This program is free software: you can redistribute it and/or modify /// it under the terms of the GNU Affero General Public License as /// published by the Free Software Foundation, either version 3 of the /// License. /// /// This program is distributed in the hope that it will be useful, /// but WITHOUT ANY WARRANTY; without even the implied warranty of /// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the /// GNU Affero General Public License for more details. 
/// /// $Id: 70994d77440d1a19d3b6204a50e578c950008b8a $ ////////////////////////////////////////////////////////////////////////////// #define __STDC_CONSTANT_MACROS ///< needed for ffmpeg UINT64_C #include #include #include #include #include #include #ifdef HAVE_CONFIG #include "config.h" #endif #include "softhddev.h" #include "softhddevice.h" #include "softhddevice_service.h" extern "C" { #include #include #include "audio.h" #include "video.h" #include "codec.h" } #if APIVERSNUM >= 20301 #define MURKS -> #else #define MURKS . #define LOCK_CHANNELS_READ do { } while (0) #endif ////////////////////////////////////////////////////////////////////////////// /// vdr-plugin version number. /// Makefile extracts the version number for generating the file name /// for the distribution archive. static const char *const VERSION = "0.6.1rc1" #ifdef GIT_REV "-GIT" GIT_REV #endif ; /// vdr-plugin description. static const char *const DESCRIPTION = trNOOP("A software and GPU emulated HD device"); /// vdr-plugin text of main menu entry static const char *MAINMENUENTRY = trNOOP("SoftHdDevice"); /// single instance of softhddevice plugin device. 
static class cSoftHdDevice *MyDevice;	///< single plugin device instance

//////////////////////////////////////////////////////////////////////////////

#define RESOLUTIONS 4			///< number of resolutions

    /// resolutions names (indexes the per-resolution config tables below)
static const char *const Resolution[RESOLUTIONS] = {
    "576i", "720p", "1080i_fake", "1080i"
};

static char ConfigMakePrimary;		///< config primary wanted
static char ConfigHideMainMenuEntry;	///< config hide main menu entry
static char ConfigDetachFromMainMenu;	///< detach from main menu entry instead of suspend
static char ConfigSuspendClose;		///< suspend should close devices
static char ConfigSuspendX11;		///< suspend should stop x11

static char Config4to3DisplayFormat = 1;	///< config 4:3 display format
static char ConfigOtherDisplayFormat = 1;	///< config other display format
static uint32_t ConfigVideoBackground;	///< config video background color
static int ConfigOsdWidth;		///< config OSD width
static int ConfigOsdHeight;		///< config OSD height
static char ConfigVideoStudioLevels;	///< config use studio levels
static char ConfigVideo60HzMode;	///< config use 60Hz display mode
static char ConfigVideoSoftStartSync;	///< config use softstart sync
static char ConfigVideoBlackPicture;	///< config enable black picture mode
char ConfigVideoClearOnSwitch;		///< config enable clear on channel switch (non-static: also used from C part)
static int ConfigVideoBrightness;	///< config video brightness
static int ConfigVideoContrast = 1000;	///< config video contrast
static int ConfigVideoSaturation = 1000;	///< config video saturation
static int ConfigVideoHue;		///< config video hue

    /// config deinterlace
static int ConfigVideoDeinterlace[RESOLUTIONS];

    /// config skip chroma
static int ConfigVideoSkipChromaDeinterlace[RESOLUTIONS];

    /// config inverse telecine
static int ConfigVideoInverseTelecine[RESOLUTIONS];

    /// config denoise
static int ConfigVideoDenoise[RESOLUTIONS];

    /// config sharpen
static int ConfigVideoSharpen[RESOLUTIONS];

    /// config scaling
static int ConfigVideoScaling[RESOLUTIONS];

    /// config cut top and bottom pixels
static int ConfigVideoCutTopBottom[RESOLUTIONS];

    /// config cut left and right pixels
static int ConfigVideoCutLeftRight[RESOLUTIONS];

static int ConfigAutoCropEnabled;	///< auto crop detection enabled
static int ConfigAutoCropInterval;	///< auto crop detection interval
static int ConfigAutoCropDelay;		///< auto crop detection delay
static int ConfigAutoCropTolerance;	///< auto crop detection tolerance

static int ConfigVideoAudioDelay;	///< config audio delay
static char ConfigAudioDrift;		///< config audio drift
static char ConfigAudioPassthrough;	///< config audio pass-through mask
static char AudioPassthroughState;	///< flag audio pass-through on/off
static char ConfigAudioDownmix;		///< config ffmpeg audio downmix
static char ConfigAudioSoftvol;		///< config use software volume
static char ConfigAudioNormalize;	///< config use normalize volume
static int ConfigAudioMaxNormalize;	///< config max normalize factor
static char ConfigAudioCompression;	///< config use volume compression
static int ConfigAudioMaxCompression;	///< config max volume compression
static int ConfigAudioStereoDescent;	///< config reduce stereo loudness
int ConfigAudioBufferTime;		///< config size ms of audio buffer (non-static: also used from C part)
static int ConfigAudioAutoAES;		///< config automatic AES handling

static char *ConfigX11Display;		///< config x11 display
static char *ConfigAudioDevice;		///< config audio stereo device
static char *ConfigPassthroughDevice;	///< config audio pass-through device

#ifdef USE_PIP
static int ConfigPipX = 100 - 3 - 18;	///< config pip pip x in %
static int ConfigPipY = 100 - 4 - 18;	///< config pip pip y in %
static int ConfigPipWidth = 18;		///< config pip pip width in %
static int ConfigPipHeight = 18;	///< config pip pip height in %
static int ConfigPipVideoX;		///< config pip video x in %
static int ConfigPipVideoY;		///< config pip video y in %
static int ConfigPipVideoWidth;		///< config pip video width in %
static int ConfigPipVideoHeight;	///< config pip video height in %
static int ConfigPipAltX;		///< config pip alt. pip x in %
static int ConfigPipAltY = 50;		///< config pip alt. pip y in %
static int ConfigPipAltWidth;		///< config pip alt. pip width in %
static int ConfigPipAltHeight = 50;	///< config pip alt. pip height in %
static int ConfigPipAltVideoX;		///< config pip alt. video x in %
static int ConfigPipAltVideoY;		///< config pip alt. video y in %
static int ConfigPipAltVideoWidth;	///< config pip alt. video width in %
static int ConfigPipAltVideoHeight = 50;	///< config pip alt. video height in %
#endif

#ifdef USE_SCREENSAVER
static char ConfigEnableDPMSatBlackScreen;	///< Enable DPMS(Screensaver) while displaying black screen(radio)
#endif

static volatile int DoMakePrimary;	///< switch primary device to this

#define SUSPEND_EXTERNAL	-1	///< play external suspend mode
#define NOT_SUSPENDED		0	///< not suspend mode
#define SUSPEND_NORMAL		1	///< normal suspend mode
#define SUSPEND_DETACHED	2	///< detached suspend mode
static signed char SuspendMode;		///< suspend mode

//////////////////////////////////////////////////////////////////////////////

//////////////////////////////////////////////////////////////////////////////
//	C Callbacks
//////////////////////////////////////////////////////////////////////////////

/**
**	Soft device plugin remote class.
*/
class cSoftRemote:public cRemote
{
  public:

    /**
    **	Soft device remote class constructor.
    **
    **	@param name	remote name
    */
    cSoftRemote(const char *name):cRemote(name)
    {
    }

    /**
    **	Put keycode into vdr event queue.
    **
    **	@param code	key code
    **	@param repeat	flag key repeated
    **	@param release	flag key released
    */
    bool Put(const char *code, bool repeat = false, bool release = false) {
	return cRemote::Put(code, repeat, release);
    }
};

/**
**	Feed key press as remote input (called from C part).
**
**	Looks up the remote for @a keymap (creating a cSoftRemote on first
**	use) and feeds the key into VDR's event queue; unmapped single
**	characters fall back to keyboard keys for edit mode.
**
**	@param keymap	target keymap "XKeymap" name
**	@param key	pressed/released key name
**	@param repeat	repeated key flag
**	@param release	released key flag
**	@param letter	x11 character string (system setting locale)
*/
extern "C" void FeedKeyPress(const char *keymap, const char *key, int repeat,
    int release, const char *letter)
{
    cRemote *remote;
    cSoftRemote *csoft;

    if (!keymap || !key) {
	return;
    }
    // find remote
    for (remote = Remotes.First(); remote; remote = Remotes.Next(remote)) {
	if (!strcmp(remote->Name(), keymap)) {
	    break;
	}
    }
    // if remote not already exists, create it
    if (remote) {
	csoft = (cSoftRemote *) remote;
    } else {
	dsyslog("[softhddev]%s: remote '%s' not found\n", __FUNCTION__,
	    keymap);
	csoft = new cSoftRemote(keymap);
    }

    //dsyslog("[softhddev]%s %s, %s, %s\n", __FUNCTION__, keymap, key, letter);
    if (key[1]) {			// no single character
	if (!csoft->Put(key, repeat, release) && letter
	    && !cRemote::IsLearning()) {
	    cCharSetConv conv;
	    unsigned code;

	    code = Utf8CharGet(conv.Convert(letter));
	    if (code <= 0xFF) {
		cRemote::Put(KBDKEY(code));	// feed it for edit mode
	    }
	}
    } else if (!csoft->Put(key, repeat, release)) {
	cRemote::Put(KBDKEY(key[0]));	// feed it for edit mode
    }
}

//////////////////////////////////////////////////////////////////////////////
//	OSD
//////////////////////////////////////////////////////////////////////////////

/**
**	Soft device plugin OSD class.
*/
class cSoftOsd:public cOsd
{
  public:
    static volatile char Dirty;		///< flag force redraw everything
    int OsdLevel;			///< current osd level FIXME: remove

    cSoftOsd(int, int, uint);		///< osd constructor
    virtual ~ cSoftOsd(void);		///< osd destructor
    /// set the sub-areas to the given areas
    virtual eOsdError SetAreas(const tArea *, int);
    virtual void Flush(void);		///< commits all data to the hardware
    virtual void SetActive(bool);	///< sets OSD to be the active one
};

volatile char cSoftOsd::Dirty;		///< flag force redraw everything

/**
**	Sets this OSD to be the active one.
**
**	@param on	true on, false off
**
**	@note only needed as workaround for text2skin plugin with
**	undrawn areas.
*/
void cSoftOsd::SetActive(bool on)
{
#ifdef OSD_DEBUG
    dsyslog("[softhddev]%s: %d level %d\n", __FUNCTION__, on, OsdLevel);
#endif

    if (Active() == on) {
	return;				// already active, no action
    }
    cOsd::SetActive(on);
    if (on) {
	Dirty = 1;
	// only flush here if there are already bitmaps
	if (GetBitmap(0)) {
	    Flush();
	}
    } else {
	OsdClose();
    }
}

/**
**	Constructor OSD.
**
**	Initializes the OSD with the given coordinates.
**
**	@param left	x-coordinate of osd on display
**	@param top	y-coordinate of osd on display
**	@param level	level of the osd (smallest is shown)
*/
cSoftOsd::cSoftOsd(int left, int top, uint level)
:cOsd(left, top, level)
{
#ifdef OSD_DEBUG
    /* FIXME: OsdWidth/OsdHeight not correct! */
    dsyslog("[softhddev]%s: %dx%d%+d%+d, %d\n", __FUNCTION__, OsdWidth(),
	OsdHeight(), left, top, level);
#endif

    OsdLevel = level;
}

/**
**	OSD Destructor.
**
**	Shuts down the OSD.
*/
cSoftOsd::~cSoftOsd(void)
{
#ifdef OSD_DEBUG
    dsyslog("[softhddev]%s: level %d\n", __FUNCTION__, OsdLevel);
#endif
    SetActive(false);
    // done by SetActive: OsdClose();

#ifdef USE_YAEPG
    // support yaepghd, video window
    if (vidWin.bpp) {			// restore fullsized video
	int width;
	int height;
	double video_aspect;

	::GetOsdSize(&width, &height, &video_aspect);
	// works osd relative
	::ScaleVideo(0, 0, width, height);
    }
#endif
}

/**
**	Set the sub-areas to the given areas.
*/
eOsdError cSoftOsd::SetAreas(const tArea * areas, int n)
{
#ifdef OSD_DEBUG
    dsyslog("[softhddev]%s: %d areas \n", __FUNCTION__, n);
#endif

    // clear old OSD, when new areas are set
    if (!IsTrueColor()) {
	cBitmap *bitmap;
	int i;

	for (i = 0; (bitmap = GetBitmap(i)); i++) {
	    bitmap->Clean();
	}
    }
    if (Active()) {
	VideoOsdClear();
	Dirty = 1;
    }
    return cOsd::SetAreas(areas, n);
}

/**
**	Actually commits all data to the OSD hardware.
*/
void cSoftOsd::Flush(void)
{
    cPixmapMemory *pm;

#ifdef OSD_DEBUG
    dsyslog("[softhddev]%s: level %d active %d\n", __FUNCTION__, OsdLevel,
	Active());
#endif

    if (!Active()) {			// this osd is not active
	return;
    }
#ifdef USE_YAEPG
    // support yaepghd, video window
    if (vidWin.bpp) {
#ifdef OSD_DEBUG
	dsyslog("[softhddev]%s: %dx%d%+d%+d\n", __FUNCTION__, vidWin.Width(),
	    vidWin.Height(), vidWin.x1, vidWin.y2);
#endif
	// FIXME: vidWin is OSD relative not video window.
	// FIXME: doesn't work if fixed OSD width != real window width
	// FIXME: solved in VideoSetOutputPosition
	::ScaleVideo(Left() + vidWin.x1, Top() + vidWin.y1, vidWin.Width(),
	    vidWin.Height());
    }
#endif

    //
    //	indexed bitmap path (old, non true color OSD)
    //
    if (!IsTrueColor()) {
	cBitmap *bitmap;
	int i;

#ifdef OSD_DEBUG
	static char warned;

	if (!warned) {
	    dsyslog("[softhddev]%s: FIXME: should be truecolor\n",
		__FUNCTION__);
	    warned = 1;
	}
#endif
	// draw all bitmaps
	for (i = 0; (bitmap = GetBitmap(i)); ++i) {
	    uint8_t *argb;
	    int xs;
	    int ys;
	    int x;
	    int y;
	    int w;
	    int h;
	    int x1;
	    int y1;
	    int x2;
	    int y2;

	    // get dirty bounding box
	    if (Dirty) {		// forced complete update
		x1 = 0;
		y1 = 0;
		x2 = bitmap->Width() - 1;
		y2 = bitmap->Height() - 1;
	    } else if (!bitmap->Dirty(x1, y1, x2, y2)) {
		continue;		// nothing dirty continue
	    }
	    // convert and upload only visible dirty areas
	    xs = bitmap->X0() + Left();
	    ys = bitmap->Y0() + Top();
	    // FIXME: negative position bitmaps
	    w = x2 - x1 + 1;
	    h = y2 - y1 + 1;
	    // clip to screen
	    if (1) {			// just for the case it makes trouble
		int width;
		int height;
		double video_aspect;

		if (xs < 0) {
		    if (xs + x1 < 0) {
			x1 -= xs + x1;
			w += xs + x1;
			if (w <= 0) {
			    continue;
			}
		    }
		    xs = 0;
		}
		if (ys < 0) {
		    if (ys + y1 < 0) {
			y1 -= ys + y1;
			h += ys + y1;
			if (h <= 0) {
			    continue;
			}
		    }
		    ys = 0;
		}
		::GetOsdSize(&width, &height, &video_aspect);
		if (w > width - xs - x1) {
		    w = width - xs - x1;
		    if (w <= 0) {
			continue;
		    }
		    x2 = x1 + w - 1;
		}
		if (h > height - ys - y1) {
		    h = height - ys - y1;
		    if (h <= 0) {
			continue;
		    }
		    y2 = y1 + h - 1;
		}
	    }
#ifdef DEBUG
	    if (w > bitmap->Width() || h > bitmap->Height()) {
		esyslog(tr("[softhddev]: dirty area too big\n"));
		abort();
	    }
#endif
	    // NOTE(review): malloc result is used unchecked; an allocation
	    // failure would crash here.
	    argb = (uint8_t *) malloc(w * h * sizeof(uint32_t));
	    for (y = y1; y <= y2; ++y) {
		for (x = x1; x <= x2; ++x) {
		    ((uint32_t *) argb)[x - x1 + (y - y1) * w] =
			bitmap->GetColor(x, y);
		}
	    }
#ifdef OSD_DEBUG
	    dsyslog("[softhddev]%s: draw %dx%d%+d%+d bm\n", __FUNCTION__, w,
		h, xs + x1, ys + y1);
#endif
	    OsdDrawARGB(0, 0, w, h, w * sizeof(uint32_t), argb, xs + x1,
		ys + y1);
	    bitmap->Clean();
	    // FIXME: reuse argb
	    free(argb);
	}
	Dirty = 0;
	return;
    }

    //
    //	true color path: upload all rendered pixmaps
    //
    LOCK_PIXMAPS;
    while ((pm = (dynamic_cast < cPixmapMemory * >(RenderPixmaps())))) {
	int xp;
	int yp;
	int stride;
	int x;
	int y;
	int w;
	int h;

	x = pm->ViewPort().X();
	y = pm->ViewPort().Y();
	w = pm->ViewPort().Width();
	h = pm->ViewPort().Height();
	stride = w * sizeof(tColor);

	// clip to osd
	xp = 0;
	if (x < 0) {
	    xp = -x;
	    w -= xp;
	    x = 0;
	}
	yp = 0;
	if (y < 0) {
	    yp = -y;
	    h -= yp;
	    y = 0;
	}
	if (w > Width() - x) {
	    w = Width() - x;
	}
	if (h > Height() - y) {
	    h = Height() - y;
	}

	x += Left();
	y += Top();

	// clip to screen
	if (1) {			// just for the case it makes trouble
	    // and it can happen!
	    int width;
	    int height;
	    double video_aspect;

	    if (x < 0) {
		w += x;
		xp += -x;
		x = 0;
	    }
	    if (y < 0) {
		h += y;
		yp += -y;
		y = 0;
	    }
	    ::GetOsdSize(&width, &height, &video_aspect);
	    if (w > width - x) {
		w = width - x;
	    }
	    if (h > height - y) {
		h = height - y;
	    }
	}
#ifdef OSD_DEBUG
	dsyslog("[softhddev]%s: draw %dx%d%+d%+d*%d -> %+d%+d %p\n",
	    __FUNCTION__, w, h, xp, yp, stride, x, y, pm->Data());
#endif
	OsdDrawARGB(xp, yp, w, h, stride, pm->Data(), x, y);

#if APIVERSNUM >= 20110
	DestroyPixmap(pm);
#else
	delete pm;
#endif
    }
    Dirty = 0;
}

//////////////////////////////////////////////////////////////////////////////
//	OSD provider
//////////////////////////////////////////////////////////////////////////////

/**
**	Soft device plugin OSD provider class.
*/ class cSoftOsdProvider:public cOsdProvider { private: static cOsd *Osd; ///< single OSD public: virtual cOsd * CreateOsd(int, int, uint); virtual bool ProvidesTrueColor(void); cSoftOsdProvider(void); ///< OSD provider constructor //virtual ~cSoftOsdProvider(); ///< OSD provider destructor }; cOsd *cSoftOsdProvider::Osd; ///< single osd /** ** Create a new OSD. ** ** @param left x-coordinate of OSD ** @param top y-coordinate of OSD ** @param level layer level of OSD */ cOsd *cSoftOsdProvider::CreateOsd(int left, int top, uint level) { #ifdef OSD_DEBUG dsyslog("[softhddev]%s: %d, %d, %d\n", __FUNCTION__, left, top, level); #endif return Osd = new cSoftOsd(left, top, level); } /** ** Check if this OSD provider is able to handle a true color OSD. ** ** @returns true we are able to handle a true color OSD. */ bool cSoftOsdProvider::ProvidesTrueColor(void) { return true; } /** ** Create cOsdProvider class. */ cSoftOsdProvider::cSoftOsdProvider(void) : cOsdProvider() { #ifdef OSD_DEBUG dsyslog("[softhddev]%s:\n", __FUNCTION__); #endif } /** ** Destroy cOsdProvider class. cSoftOsdProvider::~cSoftOsdProvider() { dsyslog("[softhddev]%s:\n", __FUNCTION__); } */ ////////////////////////////////////////////////////////////////////////////// // cMenuSetupPage ////////////////////////////////////////////////////////////////////////////// /** ** Soft device plugin menu setup page class. 
*/
class cMenuSetupSoft:public cMenuSetupPage
{
  protected:

    ///
    ///	local copies of global setup variables:
    /// @{
    int General;			// "General" section collapsed flag
    int MakePrimary;
    int HideMainMenuEntry;
    int DetachFromMainMenu;
    int OsdSize;
    int OsdWidth;
    int OsdHeight;
    int SuspendClose;
    int SuspendX11;

    int Video;				// "Video" section collapsed flag
    int Video4to3DisplayFormat;
    int VideoOtherDisplayFormat;
    uint32_t Background;
    uint32_t BackgroundAlpha;
    int StudioLevels;
    int _60HzMode;
    int SoftStartSync;
    int BlackPicture;
    int ClearOnSwitch;
    int Brightness;
    int Contrast;
    int Saturation;
    int Hue;

    int ResolutionShown[RESOLUTIONS];	// per-resolution sub-section flags
    int Scaling[RESOLUTIONS];
    int Deinterlace[RESOLUTIONS];
    int SkipChromaDeinterlace[RESOLUTIONS];
    int InverseTelecine[RESOLUTIONS];
    int Denoise[RESOLUTIONS];
    int Sharpen[RESOLUTIONS];
    int CutTopBottom[RESOLUTIONS];
    int CutLeftRight[RESOLUTIONS];

    int AutoCropInterval;
    int AutoCropDelay;
    int AutoCropTolerance;

    int Audio;				// "Audio" section collapsed flag
    int AudioDelay;
    int AudioDrift;
    int AudioPassthroughDefault;
    int AudioPassthroughPCM;
    int AudioPassthroughAC3;
    int AudioPassthroughEAC3;
    int AudioDownmix;
    int AudioSoftvol;
    int AudioNormalize;
    int AudioMaxNormalize;
    int AudioCompression;
    int AudioMaxCompression;
    int AudioStereoDescent;
    int AudioBufferTime;
    int AudioAutoAES;

#ifdef USE_PIP
    int Pip;				// "Picture-In-Picture" collapsed flag
    int PipX;
    int PipY;
    int PipWidth;
    int PipHeight;
    int PipVideoX;
    int PipVideoY;
    int PipVideoWidth;
    int PipVideoHeight;
    int PipAltX;
    int PipAltY;
    int PipAltWidth;
    int PipAltHeight;
    int PipAltVideoX;
    int PipAltVideoY;
    int PipAltVideoWidth;
    int PipAltVideoHeight;
#endif
#ifdef USE_SCREENSAVER
    int EnableDPMSatBlackScreen;
#endif
    /// @}
  private:
    inline cOsdItem *CollapsedItem(const char *, int &, const char * = NULL);
    void Create(void);			// create sub-menu
  protected:
    virtual void Store(void);
  public:
    cMenuSetupSoft(void);
    virtual eOSState ProcessKey(eKeys);	// handle input
};

/**
**	Create a separator item.
**
**	@param label	text inside separator
*/
static inline cOsdItem *SeparatorItem(const char *label)
{
    cOsdItem *item;

    item = new cOsdItem(cString::sprintf("* %s: ", label));
    item->SetSelectable(false);
    return item;
}

/**
**	Create a collapsed item.
**
**	@param label	text inside collapsed
**	@param flag	flag handling collapsed or opened
**	@param msg	open message
*/
inline cOsdItem *cMenuSetupSoft::CollapsedItem(const char *label, int &flag,
    const char *msg)
{
    cOsdItem *item;

    item = new cMenuEditBoolItem(cString::sprintf("* %s", label), &flag,
	msg ? msg : tr("show"), tr("hide"));
    return item;
}

/**
**	Create setup menu.
**
**	Builds the complete (re)collapsible setup menu; the item order here
**	is the menu layout shown to the user.
*/
void cMenuSetupSoft::Create(void)
{
    static const char *const osd_size[] = {
	"auto", "1920x1080", "1280x720", "custom",
    };
    static const char *const video_display_formats_4_3[] = {
	"pan&scan", "letterbox", "center cut-out",
    };
    static const char *const video_display_formats_16_9[] = {
	"pan&scan", "pillarbox", "center cut-out",
    };
    static const char *const deinterlace[] = {
	"Bob", "Weave/None", "Temporal", "TemporalSpatial", "Software Bob",
	"Software Spatial",
    };
    static const char *const deinterlace_short[] = {
	"B", "W", "T", "T+S", "S+B", "S+S",
    };
    static const char *const scaling[] = {
	"Normal", "Fast", "HQ", "Anamorphic"
    };
    static const char *const scaling_short[] = {
	"N", "F", "HQ", "A"
    };
    static const char *const audiodrift[] = {
	"None", "PCM", "AC-3", "PCM + AC-3"
    };
    static const char *const resolution[RESOLUTIONS] = {
	"576i", "720p", "fake 1080i", "1080i"
    };
    int current;
    int i;

    current = Current();		// get current menu item index
    Clear();				// clear the menu

    //
    //	general
    //
    Add(CollapsedItem(tr("General"), General));
    if (General) {
	Add(new cMenuEditBoolItem(tr("Make primary device"), &MakePrimary,
		trVDR("no"), trVDR("yes")));
	Add(new cMenuEditBoolItem(tr("Hide main menu entry"),
		&HideMainMenuEntry, trVDR("no"), trVDR("yes")));
	//
	//	osd
	//
	Add(new cMenuEditStraItem(tr("Osd size"), &OsdSize, 4, osd_size));
	if (OsdSize == 3) {		// "custom" needs explicit dimensions
	    Add(new cMenuEditIntItem(tr("Osd width"), &OsdWidth, 0, 4096));
	    Add(new cMenuEditIntItem(tr("Osd height"), &OsdHeight, 0, 4096));
	}
	//
	//	suspend
	//
	Add(SeparatorItem(tr("Suspend")));
	Add(new cMenuEditBoolItem(tr("Detach from main menu entry"),
		&DetachFromMainMenu, trVDR("no"), trVDR("yes")));
	Add(new cMenuEditBoolItem(tr("Suspend closes video+audio"),
		&SuspendClose, trVDR("no"), trVDR("yes")));
	Add(new cMenuEditBoolItem(tr("Suspend stops x11"), &SuspendX11,
		trVDR("no"), trVDR("yes")));
    }
    //
    //	video
    //
    Add(CollapsedItem(tr("Video"), Video));
    if (Video) {
#ifdef USE_SCREENSAVER
	Add(new
	    cMenuEditBoolItem(tr("Enable Screensaver(DPMS) at black screen"),
		&EnableDPMSatBlackScreen, trVDR("no"), trVDR("yes")));
#endif
	Add(new cMenuEditStraItem(trVDR("4:3 video display format"),
		&Video4to3DisplayFormat, 3, video_display_formats_4_3));
	Add(new cMenuEditStraItem(trVDR("16:9+other video display format"),
		&VideoOtherDisplayFormat, 3, video_display_formats_16_9));
	// FIXME: switch config gray/color configuration
	Add(new cMenuEditIntItem(tr("Video background color (RGB)"),
		(int *)&Background, 0, 0x00FFFFFF));
	Add(new cMenuEditIntItem(tr("Video background color (Alpha)"),
		(int *)&BackgroundAlpha, 0, 0xFF));
	Add(new cMenuEditBoolItem(tr("Use studio levels (vdpau only)"),
		&StudioLevels, trVDR("no"), trVDR("yes")));
	Add(new cMenuEditBoolItem(tr("60hz display mode"), &_60HzMode,
		trVDR("no"), trVDR("yes")));
	Add(new cMenuEditBoolItem(tr("Soft start a/v sync"), &SoftStartSync,
		trVDR("no"), trVDR("yes")));
	Add(new cMenuEditBoolItem(tr("Black during channel switch"),
		&BlackPicture, trVDR("no"), trVDR("yes")));
	Add(new cMenuEditBoolItem(tr("Clear decoder on channel switch"),
		&ClearOnSwitch, trVDR("no"), trVDR("yes")));
	Add(new cMenuEditIntItem(tr("Brightness (-1000..1000) (vdpau)"),
		&Brightness, -1000, 1000, tr("min"), tr("max")));
	Add(new cMenuEditIntItem(tr("Contrast (0..10000) (vdpau)"),
		&Contrast, 0, 10000, tr("min"), tr("max")));
	Add(new cMenuEditIntItem(tr("Saturation (0..10000) (vdpau)"),
		&Saturation, 0, 10000, tr("min"), tr("max")));
	Add(new cMenuEditIntItem(tr("Hue (-3141..3141) (vdpau)"), &Hue,
		-3141, 3141, tr("min"), tr("max")));
	for (i = 0; i < RESOLUTIONS; ++i) {
	    cString msg;

	    // short hidden informations
	    msg = cString::sprintf("%s,%s%s%s%s,...",
		scaling_short[Scaling[i]],
		deinterlace_short[Deinterlace[i]],
		SkipChromaDeinterlace[i] ? ",skip" : "",
		InverseTelecine[i] ? ",ITC" : "", Denoise[i] ? ",DN" : "");
	    Add(CollapsedItem(resolution[i], ResolutionShown[i], msg));

	    if (ResolutionShown[i]) {
		Add(new cMenuEditStraItem(tr("Scaling"), &Scaling[i], 4,
			scaling));
		Add(new cMenuEditStraItem(tr("Deinterlace"), &Deinterlace[i],
			6, deinterlace));
		Add(new cMenuEditBoolItem(tr("SkipChromaDeinterlace (vdpau)"),
			&SkipChromaDeinterlace[i], trVDR("no"), trVDR("yes")));
		Add(new cMenuEditBoolItem(tr("Inverse Telecine (vdpau)"),
			&InverseTelecine[i], trVDR("no"), trVDR("yes")));
		Add(new cMenuEditIntItem(tr("Denoise (0..1000) (vdpau)"),
			&Denoise[i], 0, 1000, tr("off"), tr("max")));
		Add(new cMenuEditIntItem(tr("Sharpen (-1000..1000) (vdpau)"),
			&Sharpen[i], -1000, 1000, tr("blur max"),
			tr("sharpen max")));
		Add(new cMenuEditIntItem(tr("Cut top and bottom (pixel)"),
			&CutTopBottom[i], 0, 250));
		Add(new cMenuEditIntItem(tr("Cut left and right (pixel)"),
			&CutLeftRight[i], 0, 250));
	    }
	}
	//
	//	auto-crop
	//
	Add(SeparatorItem(tr("Auto-crop")));
	Add(new cMenuEditIntItem(tr("Autocrop interval (frames)"),
		&AutoCropInterval, 0, 200, tr("off")));
	Add(new cMenuEditIntItem(tr("Autocrop delay (n * interval)"),
		&AutoCropDelay, 0, 200));
	Add(new cMenuEditIntItem(tr("Autocrop tolerance (pixel)"),
		&AutoCropTolerance, 0, 32));
    }
    //
    //	audio
    //
    Add(CollapsedItem(tr("Audio"), Audio));
    if (Audio) {
	Add(new cMenuEditIntItem(tr("Audio/Video delay (ms)"), &AudioDelay,
		-1000, 1000));
	Add(new cMenuEditStraItem(tr("Audio drift correction"), &AudioDrift,
		4, audiodrift));
	Add(new cMenuEditBoolItem(tr("Pass-through default"),
		&AudioPassthroughDefault, trVDR("off"), trVDR("on")));
	Add(new cMenuEditBoolItem(tr("\040\040PCM pass-through"),
		&AudioPassthroughPCM, trVDR("no"), trVDR("yes")));
	Add(new cMenuEditBoolItem(tr("\040\040AC-3 pass-through"),
		&AudioPassthroughAC3, trVDR("no"), trVDR("yes")));
	Add(new cMenuEditBoolItem(tr("\040\040E-AC-3 pass-through"),
		&AudioPassthroughEAC3, trVDR("no"), trVDR("yes")));
	Add(new cMenuEditBoolItem(tr("Enable (E-)AC-3 (decoder) downmix"),
		&AudioDownmix, trVDR("no"), trVDR("yes")));
	Add(new cMenuEditBoolItem(tr("Volume control"), &AudioSoftvol,
		tr("Hardware"), tr("Software")));
	Add(new cMenuEditBoolItem(tr("Enable normalize volume"),
		&AudioNormalize, trVDR("no"), trVDR("yes")));
	Add(new cMenuEditIntItem(tr("  Max normalize factor (/1000)"),
		&AudioMaxNormalize, 0, 10000));
	Add(new cMenuEditBoolItem(tr("Enable volume compression"),
		&AudioCompression, trVDR("no"), trVDR("yes")));
	Add(new cMenuEditIntItem(tr("  Max compression factor (/1000)"),
		&AudioMaxCompression, 0, 10000));
	Add(new cMenuEditIntItem(tr("Reduce stereo volume (/1000)"),
		&AudioStereoDescent, 0, 1000));
	Add(new cMenuEditIntItem(tr("Audio buffer size (ms)"),
		&AudioBufferTime, 0, 1000));
	Add(new cMenuEditBoolItem(tr("Enable automatic AES"), &AudioAutoAES,
		trVDR("no"), trVDR("yes")));
    }
#ifdef USE_PIP
    //
    //	PIP
    //
    Add(CollapsedItem(tr("Picture-In-Picture"), Pip));
    if (Pip) {
	// FIXME: predefined modes/custom mode
	Add(new cMenuEditIntItem(tr("Pip X (%)"), &PipX, 0, 100));
	Add(new cMenuEditIntItem(tr("Pip Y (%)"), &PipY, 0, 100));
	Add(new cMenuEditIntItem(tr("Pip Width (%)"), &PipWidth, 0, 100));
	Add(new cMenuEditIntItem(tr("Pip Height (%)"), &PipHeight, 0, 100));
	Add(new cMenuEditIntItem(tr("Video X (%)"), &PipVideoX, 0, 100));
	Add(new cMenuEditIntItem(tr("Video Y (%)"), &PipVideoY, 0, 100));
	Add(new cMenuEditIntItem(tr("Video Width (%)"), &PipVideoWidth, 0,
		100));
	Add(new cMenuEditIntItem(tr("Video Height (%)"), &PipVideoHeight, 0,
		100));
	Add(new cMenuEditIntItem(tr("Alternative Pip X (%)"), &PipAltX, 0,
		100));
	Add(new cMenuEditIntItem(tr("Alternative Pip Y (%)"), &PipAltY, 0,
		100));
	Add(new cMenuEditIntItem(tr("Alternative Pip Width (%)"),
		&PipAltWidth, 0, 100));
	Add(new cMenuEditIntItem(tr("Alternative Pip Height (%)"),
		&PipAltHeight, 0, 100));
	Add(new cMenuEditIntItem(tr("Alternative Video X (%)"),
		&PipAltVideoX, 0, 100));
	Add(new cMenuEditIntItem(tr("Alternative Video Y (%)"),
		&PipAltVideoY, 0, 100));
	Add(new cMenuEditIntItem(tr("Alternative Video Width (%)"),
		&PipAltVideoWidth, 0, 100));
	Add(new cMenuEditIntItem(tr("Alternative Video Height (%)"),
		&PipAltVideoHeight, 0, 100));
    }
#endif
    SetCurrent(Get(current));		// restore selected menu entry
    Display();				// display build menu
}

/**
**	Process key for setup menu.
*/
eOSState cMenuSetupSoft::ProcessKey(eKeys key)
{
    eOSState state;
    int old_general;
    int old_video;
    int old_audio;

#ifdef USE_PIP
    int old_pip;
#endif
    int old_osd_size;
    int old_resolution_shown[RESOLUTIONS];
    int i;

    old_general = General;
    old_video = Video;
    old_audio = Audio;
#ifdef USE_PIP
    old_pip = Pip;
#endif
    old_osd_size = OsdSize;
    memcpy(old_resolution_shown, ResolutionShown, sizeof(ResolutionShown));
    state = cMenuSetupPage::ProcessKey(key);

    if (key != kNone) {
	// update menu only, if something on the structure has changed
	// this is needed because VDR menus are evil slow
	if (old_general != General || old_video != Video
	    || old_audio != Audio
#ifdef USE_PIP
	    || old_pip != Pip
#endif
	    || old_osd_size != OsdSize) {
	    Create();			// update menu
	} else {
	    for (i = 0; i < RESOLUTIONS; ++i) {
		if (old_resolution_shown[i] != ResolutionShown[i]) {
		    Create();		// update menu
		    break;
		}
	    }
	}
    }
    return state;
}

/**
**	Constructor setup menu.
**
**	Import global config variables into setup.
*/
cMenuSetupSoft::cMenuSetupSoft(void)
{
    int i;

    //
    //	general
    //
    General = 0;
    MakePrimary = ConfigMakePrimary;
    HideMainMenuEntry = ConfigHideMainMenuEntry;
    DetachFromMainMenu = ConfigDetachFromMainMenu;
    //
    //	osd
    //
    OsdWidth = ConfigOsdWidth;
    OsdHeight = ConfigOsdHeight;
    // map width/height to the osd_size selector (0=auto .. 3=custom)
    if (!OsdWidth && !OsdHeight) {
	OsdSize = 0;
    } else if (OsdWidth == 1920 && OsdHeight == 1080) {
	OsdSize = 1;
    } else if (OsdWidth == 1280 && OsdHeight == 720) {
	OsdSize = 2;
    } else {
	OsdSize = 3;
    }
    //
    //	suspend
    //
    SuspendClose = ConfigSuspendClose;
    SuspendX11 = ConfigSuspendX11;
    //
    //	video
    //
    Video = 0;
    Video4to3DisplayFormat = Config4to3DisplayFormat;
    VideoOtherDisplayFormat = ConfigOtherDisplayFormat;
    // no unsigned int menu item supported, split background color/alpha
    Background = ConfigVideoBackground >> 8;
    BackgroundAlpha = ConfigVideoBackground & 0xFF;
    StudioLevels = ConfigVideoStudioLevels;
    _60HzMode = ConfigVideo60HzMode;
    SoftStartSync = ConfigVideoSoftStartSync;
    BlackPicture = ConfigVideoBlackPicture;
    ClearOnSwitch = ConfigVideoClearOnSwitch;
    Brightness = ConfigVideoBrightness;
    Contrast = ConfigVideoContrast;
    Saturation = ConfigVideoSaturation;
    Hue = ConfigVideoHue;
    for (i = 0; i < RESOLUTIONS; ++i) {
	ResolutionShown[i] = 0;
	Scaling[i] = ConfigVideoScaling[i];
	Deinterlace[i] = ConfigVideoDeinterlace[i];
	SkipChromaDeinterlace[i] = ConfigVideoSkipChromaDeinterlace[i];
	InverseTelecine[i] = ConfigVideoInverseTelecine[i];
	Denoise[i] = ConfigVideoDenoise[i];
	Sharpen[i] = ConfigVideoSharpen[i];
	CutTopBottom[i] = ConfigVideoCutTopBottom[i];
	CutLeftRight[i] = ConfigVideoCutLeftRight[i];
    }
    //
    //	auto-crop
    //
    AutoCropInterval = ConfigAutoCropInterval;
    AutoCropDelay = ConfigAutoCropDelay;
    AutoCropTolerance = ConfigAutoCropTolerance;
    //
    //	audio
    //
    Audio = 0;
    AudioDelay = ConfigVideoAudioDelay;
    AudioDrift = ConfigAudioDrift;
    AudioPassthroughDefault = AudioPassthroughState;
    AudioPassthroughPCM = ConfigAudioPassthrough & CodecPCM;
    AudioPassthroughAC3 = ConfigAudioPassthrough & CodecAC3;
    AudioPassthroughEAC3 = ConfigAudioPassthrough & CodecEAC3;
    AudioDownmix = ConfigAudioDownmix;
    AudioSoftvol = ConfigAudioSoftvol;
    AudioNormalize = ConfigAudioNormalize;
    AudioMaxNormalize = ConfigAudioMaxNormalize;
    AudioCompression = ConfigAudioCompression;
    AudioMaxCompression = ConfigAudioMaxCompression;
    AudioStereoDescent = ConfigAudioStereoDescent;
    AudioBufferTime = ConfigAudioBufferTime;
    AudioAutoAES = ConfigAudioAutoAES;
#ifdef USE_PIP
    //
    //	PIP
    //
    Pip = 0;
    PipX = ConfigPipX;
    PipY = ConfigPipY;
    PipWidth = ConfigPipWidth;
    PipHeight = ConfigPipHeight;
    PipVideoX = ConfigPipVideoX;
    PipVideoY = ConfigPipVideoY;
    PipVideoWidth = ConfigPipVideoWidth;
    PipVideoHeight = ConfigPipVideoHeight;
    PipAltX = ConfigPipAltX;
    PipAltY = ConfigPipAltY;
    PipAltWidth = ConfigPipAltWidth;
    PipAltHeight = ConfigPipAltHeight;
    PipAltVideoX = ConfigPipAltVideoX;
    PipAltVideoY = ConfigPipAltVideoY;
    PipAltVideoWidth = ConfigPipAltVideoWidth;
    PipAltVideoHeight = ConfigPipAltVideoHeight;
#endif
#ifdef USE_SCREENSAVER
    EnableDPMSatBlackScreen = ConfigEnableDPMSatBlackScreen;
#endif

    Create();

}

/**
**	Store setup.
*/ void cMenuSetupSoft::Store(void) { int i; SetupStore("MakePrimary", ConfigMakePrimary = MakePrimary); SetupStore("HideMainMenuEntry", ConfigHideMainMenuEntry = HideMainMenuEntry); SetupStore("DetachFromMainMenu", ConfigDetachFromMainMenu = DetachFromMainMenu); switch (OsdSize) { case 0: OsdWidth = 0; OsdHeight = 0; break; case 1: OsdWidth = 1920; OsdHeight = 1080; break; case 2: OsdWidth = 1280; OsdHeight = 720; default: break; } if (ConfigOsdWidth != OsdWidth || ConfigOsdHeight != OsdHeight) { VideoSetOsdSize(ConfigOsdWidth = OsdWidth, ConfigOsdHeight = OsdHeight); // FIXME: shown osd size not updated } SetupStore("Osd.Width", ConfigOsdWidth); SetupStore("Osd.Height", ConfigOsdHeight); SetupStore("Suspend.Close", ConfigSuspendClose = SuspendClose); SetupStore("Suspend.X11", ConfigSuspendX11 = SuspendX11); SetupStore("Video4to3DisplayFormat", Config4to3DisplayFormat = Video4to3DisplayFormat); VideoSet4to3DisplayFormat(Config4to3DisplayFormat); SetupStore("VideoOtherDisplayFormat", ConfigOtherDisplayFormat = VideoOtherDisplayFormat); VideoSetOtherDisplayFormat(ConfigOtherDisplayFormat); ConfigVideoBackground = Background << 8 | (BackgroundAlpha & 0xFF); SetupStore("Background", ConfigVideoBackground); VideoSetBackground(ConfigVideoBackground); SetupStore("StudioLevels", ConfigVideoStudioLevels = StudioLevels); VideoSetStudioLevels(ConfigVideoStudioLevels); SetupStore("60HzMode", ConfigVideo60HzMode = _60HzMode); VideoSet60HzMode(ConfigVideo60HzMode); SetupStore("SoftStartSync", ConfigVideoSoftStartSync = SoftStartSync); VideoSetSoftStartSync(ConfigVideoSoftStartSync); SetupStore("BlackPicture", ConfigVideoBlackPicture = BlackPicture); VideoSetBlackPicture(ConfigVideoBlackPicture); SetupStore("ClearOnSwitch", ConfigVideoClearOnSwitch = ClearOnSwitch); SetupStore("Brightness", ConfigVideoBrightness = Brightness); VideoSetBrightness(ConfigVideoBrightness); SetupStore("Contrast", ConfigVideoContrast = Contrast); VideoSetContrast(ConfigVideoContrast); 
SetupStore("Saturation", ConfigVideoSaturation = Saturation); VideoSetSaturation(ConfigVideoSaturation); SetupStore("Hue", ConfigVideoHue = Hue); VideoSetHue(ConfigVideoHue); for (i = 0; i < RESOLUTIONS; ++i) { char buf[128]; snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "Scaling"); SetupStore(buf, ConfigVideoScaling[i] = Scaling[i]); snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "Deinterlace"); SetupStore(buf, ConfigVideoDeinterlace[i] = Deinterlace[i]); snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "SkipChromaDeinterlace"); SetupStore(buf, ConfigVideoSkipChromaDeinterlace[i] = SkipChromaDeinterlace[i]); snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "InverseTelecine"); SetupStore(buf, ConfigVideoInverseTelecine[i] = InverseTelecine[i]); snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "Denoise"); SetupStore(buf, ConfigVideoDenoise[i] = Denoise[i]); snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "Sharpen"); SetupStore(buf, ConfigVideoSharpen[i] = Sharpen[i]); snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "CutTopBottom"); SetupStore(buf, ConfigVideoCutTopBottom[i] = CutTopBottom[i]); snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "CutLeftRight"); SetupStore(buf, ConfigVideoCutLeftRight[i] = CutLeftRight[i]); } VideoSetScaling(ConfigVideoScaling); VideoSetDeinterlace(ConfigVideoDeinterlace); VideoSetSkipChromaDeinterlace(ConfigVideoSkipChromaDeinterlace); VideoSetInverseTelecine(ConfigVideoInverseTelecine); VideoSetDenoise(ConfigVideoDenoise); VideoSetSharpen(ConfigVideoSharpen); VideoSetCutTopBottom(ConfigVideoCutTopBottom); VideoSetCutLeftRight(ConfigVideoCutLeftRight); SetupStore("AutoCrop.Interval", ConfigAutoCropInterval = AutoCropInterval); SetupStore("AutoCrop.Delay", ConfigAutoCropDelay = AutoCropDelay); SetupStore("AutoCrop.Tolerance", ConfigAutoCropTolerance = AutoCropTolerance); VideoSetAutoCrop(ConfigAutoCropInterval, ConfigAutoCropDelay, ConfigAutoCropTolerance); ConfigAutoCropEnabled = ConfigAutoCropInterval != 0; 
SetupStore("AudioDelay", ConfigVideoAudioDelay = AudioDelay); VideoSetAudioDelay(ConfigVideoAudioDelay); SetupStore("AudioDrift", ConfigAudioDrift = AudioDrift); CodecSetAudioDrift(ConfigAudioDrift); // FIXME: can handle more audio state changes here // downmix changed reset audio, to get change direct if (ConfigAudioDownmix != AudioDownmix) { ResetChannelId(); } ConfigAudioPassthrough = (AudioPassthroughPCM ? CodecPCM : 0) | (AudioPassthroughAC3 ? CodecAC3 : 0) | (AudioPassthroughEAC3 ? CodecEAC3 : 0); AudioPassthroughState = AudioPassthroughDefault; if (AudioPassthroughState) { SetupStore("AudioPassthrough", ConfigAudioPassthrough); CodecSetAudioPassthrough(ConfigAudioPassthrough); } else { SetupStore("AudioPassthrough", -ConfigAudioPassthrough); CodecSetAudioPassthrough(0); } SetupStore("AudioDownmix", ConfigAudioDownmix = AudioDownmix); CodecSetAudioDownmix(ConfigAudioDownmix); SetupStore("AudioSoftvol", ConfigAudioSoftvol = AudioSoftvol); AudioSetSoftvol(ConfigAudioSoftvol); SetupStore("AudioNormalize", ConfigAudioNormalize = AudioNormalize); SetupStore("AudioMaxNormalize", ConfigAudioMaxNormalize = AudioMaxNormalize); AudioSetNormalize(ConfigAudioNormalize, ConfigAudioMaxNormalize); SetupStore("AudioCompression", ConfigAudioCompression = AudioCompression); SetupStore("AudioMaxCompression", ConfigAudioMaxCompression = AudioMaxCompression); AudioSetCompression(ConfigAudioCompression, ConfigAudioMaxCompression); SetupStore("AudioStereoDescent", ConfigAudioStereoDescent = AudioStereoDescent); AudioSetStereoDescent(ConfigAudioStereoDescent); SetupStore("AudioBufferTime", ConfigAudioBufferTime = AudioBufferTime); SetupStore("AudioAutoAES", ConfigAudioAutoAES = AudioAutoAES); AudioSetAutoAES(ConfigAudioAutoAES); #ifdef USE_PIP SetupStore("pip.X", ConfigPipX = PipX); SetupStore("pip.Y", ConfigPipY = PipY); SetupStore("pip.Width", ConfigPipWidth = PipWidth); SetupStore("pip.Height", ConfigPipHeight = PipHeight); SetupStore("pip.VideoX", ConfigPipVideoX = PipVideoX); 
SetupStore("pip.VideoY", ConfigPipVideoY = PipVideoY); SetupStore("pip.VideoWidth", ConfigPipVideoWidth = PipVideoWidth); SetupStore("pip.VideoHeight", ConfigPipVideoHeight = PipVideoHeight); SetupStore("pip.Alt.X", ConfigPipAltX = PipAltX); SetupStore("pip.Alt.Y", ConfigPipAltY = PipAltY); SetupStore("pip.Alt.Width", ConfigPipAltWidth = PipAltWidth); SetupStore("pip.Alt.Height", ConfigPipAltHeight = PipAltHeight); SetupStore("pip.Alt.VideoX", ConfigPipAltVideoX = PipAltVideoX); SetupStore("pip.Alt.VideoY", ConfigPipAltVideoY = PipAltVideoY); SetupStore("pip.Alt.VideoWidth", ConfigPipAltVideoWidth = PipAltVideoWidth); SetupStore("pip.Alt.VideoHeight", ConfigPipAltVideoHeight = PipAltVideoHeight); #endif #ifdef USE_SCREENSAVER SetupStore("EnableDPMSatBlackScreen", ConfigEnableDPMSatBlackScreen = EnableDPMSatBlackScreen); SetDPMSatBlackScreen(ConfigEnableDPMSatBlackScreen); #endif } ////////////////////////////////////////////////////////////////////////////// // cPlayer ////////////////////////////////////////////////////////////////////////////// /** ** Dummy player for suspend mode. */ class cSoftHdPlayer:public cPlayer { protected: public: cSoftHdPlayer(void); virtual ~ cSoftHdPlayer(); }; cSoftHdPlayer::cSoftHdPlayer(void) { } cSoftHdPlayer::~cSoftHdPlayer() { Detach(); } ////////////////////////////////////////////////////////////////////////////// // cControl ////////////////////////////////////////////////////////////////////////////// /** ** Dummy control class for suspend mode. */ class cSoftHdControl:public cControl { public: static cSoftHdPlayer *Player; ///< dummy player virtual void Hide(void) ///< hide control { } virtual eOSState ProcessKey(eKeys); ///< process input events cSoftHdControl(void); ///< control constructor virtual ~ cSoftHdControl(); ///< control destructor }; cSoftHdPlayer *cSoftHdControl::Player; ///< dummy player instance /** ** Handle a key event. 
** ** @param key key pressed */ eOSState cSoftHdControl::ProcessKey(eKeys key) { if (SuspendMode == SUSPEND_NORMAL && (!ISMODELESSKEY(key) || key == kMenu || key == kBack || key == kStop)) { delete Player; Player = NULL; Resume(); SuspendMode = NOT_SUSPENDED; return osEnd; } return osContinue; } /** ** Player control constructor. */ cSoftHdControl::cSoftHdControl(void) : cControl(Player = new cSoftHdPlayer) { } /** ** Player control destructor. */ cSoftHdControl::~cSoftHdControl() { delete Player; Player = NULL; // loose control resume if (SuspendMode == SUSPEND_NORMAL) { Resume(); SuspendMode = NOT_SUSPENDED; } dsyslog("[softhddev]%s: dummy player stopped\n", __FUNCTION__); } ////////////////////////////////////////////////////////////////////////////// // PIP ////////////////////////////////////////////////////////////////////////////// #ifdef USE_PIP extern "C" void DelPip(void); ///< remove PIP static int PipAltPosition; ///< flag alternative position ////////////////////////////////////////////////////////////////////////////// // cReceiver ////////////////////////////////////////////////////////////////////////////// #include /** ** Receiver class for PIP mode. */ class cSoftReceiver:public cReceiver { protected: virtual void Activate(bool); #if APIVERSNUM >= 20301 virtual void Receive(const uchar *, int); #else virtual void Receive(uchar *, int); #endif public: cSoftReceiver(const cChannel *); ///< receiver constructor virtual ~ cSoftReceiver(); ///< receiver destructor }; /** ** Receiver constructor. ** ** @param channel channel to receive */ cSoftReceiver::cSoftReceiver(const cChannel * channel):cReceiver(NULL, MINPRIORITY) { // cReceiver::channelID not setup, this can cause trouble // we want video only AddPid(channel->Vpid()); } /** ** Receiver destructor. */ cSoftReceiver::~cSoftReceiver() { Detach(); } /** ** Called before the receiver gets attached or detached. 
**
**	@param on	flag attached, detached
*/
void cSoftReceiver::Activate(bool on)
{
    int width;
    int height;
    double video_aspect;
    int vx;
    int vy;
    int vw;
    int vh;
    int x;
    int y;
    int w;
    int h;

    if (!on) {				// detached: stop the PIP stream
	PipStop();
	return;
    }

    GetOsdSize(&width, &height, &video_aspect);

    // pick the normal or alternative geometry set (percent of OSD size)
    if (PipAltPosition) {
	vx = ConfigPipAltVideoX;
	vy = ConfigPipAltVideoY;
	vw = ConfigPipAltVideoWidth;
	vh = ConfigPipAltVideoHeight;
	x = ConfigPipAltX;
	y = ConfigPipAltY;
	w = ConfigPipAltWidth;
	h = ConfigPipAltHeight;
    } else {
	vx = ConfigPipVideoX;
	vy = ConfigPipVideoY;
	vw = ConfigPipVideoWidth;
	vh = ConfigPipVideoHeight;
	x = ConfigPipX;
	y = ConfigPipY;
	w = ConfigPipWidth;
	h = ConfigPipHeight;
    }
    // width/height of 0 means full OSD size
    PipStart((vx * width) / 100, (vy * height) / 100,
	vw ? (vw * width) / 100 : width, vh ? (vh * height) / 100 : height,
	(x * width) / 100, (y * height) / 100,
	w ? (w * width) / 100 : width, h ? (h * height) / 100 : height);
}

///
///	Parse packetized elementary stream.
/// /// @param data payload data of transport stream /// @param size number of payload data bytes /// @param is_start flag, start of pes packet /// static void PipPesParse(const uint8_t * data, int size, int is_start) { static uint8_t *pes_buf; static int pes_size; static int pes_index; // FIXME: quick&dirty if (!pes_buf) { pes_size = 500 * 1024 * 1024; pes_buf = (uint8_t *) malloc(pes_size); if (!pes_buf) { // out of memory, should never happen return; } pes_index = 0; } if (is_start) { // start of pes packet if (pes_index) { if (0) { fprintf(stderr, "pip: PES packet %8d %02x%02x\n", pes_index, pes_buf[2], pes_buf[3]); } if (pes_buf[0] || pes_buf[1] || pes_buf[2] != 0x01) { // FIXME: first should always fail esyslog(tr("[softhddev]pip: invalid PES packet %d\n"), pes_index); } else { PipPlayVideo(pes_buf, pes_index); // FIXME: buffer full: pes packet is dropped } pes_index = 0; } } if (pes_index + size > pes_size) { esyslog(tr("[softhddev]pip: pes buffer too small\n")); pes_size *= 2; if (pes_index + size > pes_size) { pes_size = (pes_index + size) * 2; } pes_buf = (uint8_t *) realloc(pes_buf, pes_size); if (!pes_buf) { // out of memory, should never happen return; } } memcpy(pes_buf + pes_index, data, size); pes_index += size; } /// Transport stream packet size #define TS_PACKET_SIZE 188 /// Transport stream packet sync byte #define TS_PACKET_SYNC 0x47 /** ** Receive TS packet from device. 
**
**	@param data	ts packet
**	@param size	size (#TS_PACKET_SIZE=188) of ts packet
*/
#if APIVERSNUM >= 20301
void cSoftReceiver::Receive(const uchar * data, int size)
#else
void cSoftReceiver::Receive(uchar * data, int size)
#endif
{
    const uint8_t *p;

    p = data;
    while (size >= TS_PACKET_SIZE) {
	int payload;

	if (p[0] != TS_PACKET_SYNC) {
	    esyslog(tr("[softhddev]tsdemux: transport stream out of sync\n"));
	    // FIXME: kill all buffers
	    return;
	}
	if (p[1] & 0x80) {		// transport error indicator set
	    dsyslog("[softhddev]tsdemux: transport error\n");
	    // FIXME: kill all buffers
	    goto next_packet;
	}
	if (0) {			// disabled PID trace for debugging
	    int pid;

	    pid = (p[1] & 0x1F) << 8 | p[2];
	    fprintf(stderr, "tsdemux: PID: %#04x%s%s\n", pid,
		p[1] & 0x40 ? " start" : "", p[3] & 0x10 ? " payload" : "");
	}
	// skip adaptation field
	switch (p[3] & 0x30) {		// adaptation field control bits
	    case 0x00:			// reserved
	    case 0x20:			// adaptation field only
	    default:
		goto next_packet;
	    case 0x10:			// only payload
		payload = 4;
		break;
	    case 0x30:			// skip adaptation field
		payload = 5 + p[4];
		// illegal length, ignore packet
		if (payload >= TS_PACKET_SIZE) {
		    dsyslog
			("[softhddev]tsdemux: illegal adaption field length\n");
		    goto next_packet;
		}
		break;
	}

	// bit 0x40 of byte 1 = payload unit start indicator
	PipPesParse(p + payload, TS_PACKET_SIZE - payload, p[1] & 0x40);

      next_packet:
	p += TS_PACKET_SIZE;
	size -= TS_PACKET_SIZE;
    }
}

//////////////////////////////////////////////////////////////////////////////

static cSoftReceiver *PipReceiver;	///< PIP receiver
static int PipChannelNr;		///< last PIP channel number
static const cChannel *PipChannel;	///< current PIP channel

/**
**	Stop PIP.
*/
extern "C" void DelPip(void)
{
    delete PipReceiver;

    PipReceiver = NULL;
    PipChannel = NULL;
}

/**
**	Prepare new PIP.
**
**	Attaches a video-only receiver for the channel to a free device.
**
**	@param channel_nr	channel number (0 = current channel)
*/
static void NewPip(int channel_nr)
{
    const cChannel *channel;
    cDevice *device;
    cSoftReceiver *receiver;

#ifdef DEBUG
    // is device replaying?
    if (cDevice::PrimaryDevice()->Replaying() && cControl::Control()) {
	dsyslog("[softhddev]%s: replay active\n", __FUNCTION__);
	// FIXME: need to find PID
    }
#endif

    if (!channel_nr) {
	channel_nr = cDevice::CurrentChannel();
    }
    LOCK_CHANNELS_READ;
    // MURKS = API-compat accessor for the global channel list
    if (channel_nr && (channel = Channels MURKS GetByNumber(channel_nr))
	&& (device = cDevice::GetDevice(channel, 0, false, false))) {

	DelPip();			// remove an old PIP first

	device->SwitchChannel(channel, false);
	receiver = new cSoftReceiver(channel);
	device->AttachReceiver(receiver);
	PipReceiver = receiver;
	PipChannel = channel;
	PipChannelNr = channel_nr;
    }
}

/**
**	Toggle PIP on/off.
*/
static void TogglePip(void)
{
    if (PipReceiver) {
	int attached;

	attached = PipReceiver->IsAttached();
	DelPip();
	if (attached) {			// turn off only if last PIP was on
	    return;
	}
    }
    NewPip(PipChannelNr);
}

/**
**	Switch PIP to next available channel.
**
**	@param direction	direction of channel switch
*/
static void PipNextAvailableChannel(int direction)
{
    const cChannel *channel;
    const cChannel *first;

    channel = PipChannel;
    first = channel;

    DelPip();				// disable PIP to free the device

    LOCK_CHANNELS_READ;
    while (channel) {
	bool ndr;
	cDevice *device;

	channel = direction > 0 ? Channels MURKS Next(channel)
	    : Channels MURKS Prev(channel);
	if (!channel && Setup.ChannelsWrap) {
	    channel = direction > 0 ? Channels MURKS First()
		: Channels MURKS Last();
	}
	// take the first channel a free device can actually provide
	if (channel && !channel->GroupSep()
	    && (device = cDevice::GetDevice(channel, 0, false, true))
	    && device->ProvidesChannel(channel, 0, &ndr) && !ndr) {
	    NewPip(channel->Number());
	    return;
	}
	if (channel == first) {		// wrapped around, nothing found
	    Skins.Message(mtError, tr("Channel not available!"));
	    break;
	}
    }
}

/**
**	Swap PIP channels.
*/
static void SwapPipChannels(void)
{
    const cChannel *channel;

    channel = PipChannel;

    DelPip();
    NewPip(0);				// PIP now shows the main channel

    if (channel) {
	LOCK_CHANNELS_READ;
	Channels MURKS SwitchTo(channel->Number());
    }
}

/**
**	Swap PIP position.
*/ static void SwapPipPosition(void) { int width; int height; double video_aspect; PipAltPosition ^= 1; if (!PipReceiver) { // no PIP visible, no update needed return; } GetOsdSize(&width, &height, &video_aspect); if (PipAltPosition) { PipSetPosition((ConfigPipAltVideoX * width) / 100, (ConfigPipAltVideoY * height) / 100, ConfigPipAltVideoWidth ? (ConfigPipAltVideoWidth * width) / 100 : width, ConfigPipAltVideoHeight ? (ConfigPipAltVideoHeight * height) / 100 : height, (ConfigPipAltX * width) / 100, (ConfigPipAltY * height) / 100, ConfigPipAltWidth ? (ConfigPipAltWidth * width) / 100 : width, ConfigPipAltHeight ? (ConfigPipAltHeight * height) / 100 : height); } else { PipSetPosition((ConfigPipVideoX * width) / 100, (ConfigPipVideoY * height) / 100, ConfigPipVideoWidth ? (ConfigPipVideoWidth * width) / 100 : width, ConfigPipVideoHeight ? (ConfigPipVideoHeight * height) / 100 : height, (ConfigPipX * width) / 100, (ConfigPipY * height) / 100, ConfigPipWidth ? (ConfigPipWidth * width) / 100 : width, ConfigPipHeight ? (ConfigPipHeight * height) / 100 : height); } } #endif ////////////////////////////////////////////////////////////////////////////// // cOsdMenu ////////////////////////////////////////////////////////////////////////////// /** ** Hotkey parsing state machine. */ typedef enum { HksInitial, ///< initial state HksBlue, ///< blue button pressed HksBlue1, ///< blue and 1 number pressed HksRed, ///< red button pressed } HkState; /** ** Soft device plugin menu class. */ class cSoftHdMenu:public cOsdMenu { private: HkState HotkeyState; ///< current hot-key state int HotkeyCode; ///< current hot-key code void Create(void); ///< create plugin main menu public: cSoftHdMenu(const char *, int = 0, int = 0, int = 0, int = 0, int = 0); virtual ~ cSoftHdMenu(); virtual eOSState ProcessKey(eKeys); }; /** ** Create main menu. 
*/
void cSoftHdMenu::Create(void)
{
    int current;
    int missed;
    int duped;
    int dropped;
    int counter;

    current = Current();		// get current menu item index
    Clear();				// clear the menu

    SetHasHotkeys();
    // first item toggles between suspend and detach, see ProcessKey osUser1
    if (ConfigDetachFromMainMenu) {
	Add(new cOsdItem(hk(tr("Detach SoftHdDevice")), osUser1));
    } else {
	Add(new cOsdItem(hk(tr("Suspend SoftHdDevice")), osUser1));
    }
#ifdef USE_PIP
    if (PipReceiver) {
	Add(new cOsdItem(hk(tr("PIP toggle on/off: off")), osUser2));
    } else {
	Add(new cOsdItem(hk(tr("PIP toggle on/off: on")), osUser2));
    }
    Add(new cOsdItem(hk(tr("PIP zapmode (not working)")), osUser3));
    Add(new cOsdItem(hk(tr("PIP channel +")), osUser4));
    Add(new cOsdItem(hk(tr("PIP channel -")), osUser5));
    if (PipReceiver) {
	Add(new cOsdItem(hk(tr("PIP on/swap channels: swap")), osUser6));
    } else {
	Add(new cOsdItem(hk(tr("PIP on/swap channels: on")), osUser6));
    }
    if (PipAltPosition) {
	Add(new cOsdItem(hk(tr("PIP swap position: normal")), osUser7));
    } else {
	Add(new cOsdItem(hk(tr("PIP swap position: alternative")), osUser7));
    }
    Add(new cOsdItem(hk(tr("PIP close")), osUser8));
#endif
    // spacer lines before the statistics line
    Add(new cOsdItem(NULL, osUnknown, false));
    Add(new cOsdItem(NULL, osUnknown, false));

    GetStats(&missed, &duped, &dropped, &counter);
    Add(new
	cOsdItem(cString::
	    sprintf(tr
		(" Frames missed(%d) duped(%d) dropped(%d) total(%d)"),
		missed, duped, dropped, counter), osUnknown, false));

    SetCurrent(Get(current));		// restore selected menu entry
    Display();				// display build menu
}

/**
**	Soft device menu constructor.
*/
cSoftHdMenu::cSoftHdMenu(const char *title, int c0, int c1, int c2, int c3,
    int c4)
:cOsdMenu(title, c0, c1, c2, c3, c4)
{
    HotkeyState = HksInitial;
    Create();
}

/**
**	Soft device menu destructor.
*/
cSoftHdMenu::~cSoftHdMenu()
{
}

/**
**	Handle hot key commands.
** ** @param code numeric hot key code */ static void HandleHotkey(int code) { switch (code) { case 10: // disable pass-through AudioPassthroughState = 0; CodecSetAudioPassthrough(0); Skins.QueueMessage(mtInfo, tr("pass-through disabled")); break; case 11: // enable pass-through // note: you can't enable, without configured pass-through AudioPassthroughState = 1; CodecSetAudioPassthrough(ConfigAudioPassthrough); Skins.QueueMessage(mtInfo, tr("pass-through enabled")); break; case 12: // toggle pass-through AudioPassthroughState ^= 1; if (AudioPassthroughState) { CodecSetAudioPassthrough(ConfigAudioPassthrough); Skins.QueueMessage(mtInfo, tr("pass-through enabled")); } else { CodecSetAudioPassthrough(0); Skins.QueueMessage(mtInfo, tr("pass-through disabled")); } break; case 13: // decrease audio delay ConfigVideoAudioDelay -= 10; VideoSetAudioDelay(ConfigVideoAudioDelay); Skins.QueueMessage(mtInfo, cString::sprintf(tr("audio delay changed to %d"), ConfigVideoAudioDelay)); break; case 14: // increase audio delay ConfigVideoAudioDelay += 10; VideoSetAudioDelay(ConfigVideoAudioDelay); Skins.QueueMessage(mtInfo, cString::sprintf(tr("audio delay changed to %d"), ConfigVideoAudioDelay)); break; case 15: ConfigAudioDownmix ^= 1; fprintf(stderr, "toggle downmix\n"); CodecSetAudioDownmix(ConfigAudioDownmix); if (ConfigAudioDownmix) { Skins.QueueMessage(mtInfo, tr("surround downmix enabled")); } else { Skins.QueueMessage(mtInfo, tr("surround downmix disabled")); } ResetChannelId(); break; case 20: // disable full screen VideoSetFullscreen(0); break; case 21: // enable full screen VideoSetFullscreen(1); break; case 22: // toggle full screen VideoSetFullscreen(-1); break; case 23: // disable auto-crop ConfigAutoCropEnabled = 0; VideoSetAutoCrop(0, ConfigAutoCropDelay, ConfigAutoCropTolerance); Skins.QueueMessage(mtInfo, tr("auto-crop disabled and freezed")); break; case 24: // enable auto-crop ConfigAutoCropEnabled = 1; if (!ConfigAutoCropInterval) { ConfigAutoCropInterval = 50; 
} VideoSetAutoCrop(ConfigAutoCropInterval, ConfigAutoCropDelay, ConfigAutoCropTolerance); Skins.QueueMessage(mtInfo, tr("auto-crop enabled")); break; case 25: // toggle auto-crop ConfigAutoCropEnabled ^= 1; // no interval configured, use some default if (!ConfigAutoCropInterval) { ConfigAutoCropInterval = 50; } VideoSetAutoCrop(ConfigAutoCropEnabled * ConfigAutoCropInterval, ConfigAutoCropDelay, ConfigAutoCropTolerance); if (ConfigAutoCropEnabled) { Skins.QueueMessage(mtInfo, tr("auto-crop enabled")); } else { Skins.QueueMessage(mtInfo, tr("auto-crop disabled and freezed")); } break; case 30: // change 4:3 -> window mode case 31: case 32: VideoSet4to3DisplayFormat(code - 30); break; case 39: // rotate 4:3 -> window mode VideoSet4to3DisplayFormat(-1); break; case 40: // change 16:9 -> window mode case 41: case 42: VideoSetOtherDisplayFormat(code - 40); break; case 49: // rotate 16:9 -> window mode VideoSetOtherDisplayFormat(-1); break; #ifdef USE_PIP case 102: // PIP toggle TogglePip(); break; case 104: PipNextAvailableChannel(1); break; case 105: PipNextAvailableChannel(-1); break; case 106: SwapPipChannels(); break; case 107: SwapPipPosition(); break; case 108: DelPip(); PipChannelNr = 0; break; #endif default: esyslog(tr("[softhddev]: hot key %d is not supported\n"), code); break; } } /** ** Handle key event. 
** ** @param key key event */ eOSState cSoftHdMenu::ProcessKey(eKeys key) { eOSState state; //dsyslog("[softhddev]%s: %x\n", __FUNCTION__, key); switch (HotkeyState) { case HksInitial: // initial state, waiting for hot key if (key == kBlue) { HotkeyState = HksBlue; // blue button return osContinue; } if (key == kRed) { HotkeyState = HksRed; // red button return osContinue; } break; case HksBlue: // blue and first number if (k0 <= key && key <= k9) { HotkeyCode = key - k0; HotkeyState = HksBlue1; return osContinue; } HotkeyState = HksInitial; break; case HksBlue1: // blue and second number/enter if (k0 <= key && key <= k9) { HotkeyCode *= 10; HotkeyCode += key - k0; HotkeyState = HksInitial; dsyslog("[softhddev]%s: hot-key %d\n", __FUNCTION__, HotkeyCode); HandleHotkey(HotkeyCode); return osEnd; } if (key == kOk) { HotkeyState = HksInitial; dsyslog("[softhddev]%s: hot-key %d\n", __FUNCTION__, HotkeyCode); HandleHotkey(HotkeyCode); return osEnd; } HotkeyState = HksInitial; case HksRed: // red and first number if (k0 <= key && key <= k9) { HotkeyCode = 100 + key - k0; HotkeyState = HksInitial; HandleHotkey(HotkeyCode); return osEnd; } HotkeyState = HksInitial; break; } // call standard function state = cOsdMenu::ProcessKey(key); switch (state) { case osUser1: // not already suspended if (SuspendMode == NOT_SUSPENDED && !cSoftHdControl::Player) { cControl::Launch(new cSoftHdControl); cControl::Attach(); if (ConfigDetachFromMainMenu) { Suspend(1, 1, 0); SuspendMode = SUSPEND_DETACHED; } else { Suspend(ConfigSuspendClose, ConfigSuspendClose, ConfigSuspendX11); SuspendMode = SUSPEND_NORMAL; } if (ShutdownHandler.GetUserInactiveTime()) { dsyslog("[softhddev]%s: set user inactive\n", __FUNCTION__); ShutdownHandler.SetUserInactive(); } } return osEnd; #ifdef USE_PIP case osUser2: TogglePip(); return osEnd; case osUser4: PipNextAvailableChannel(1); return osEnd; case osUser5: PipNextAvailableChannel(-1); return osEnd; case osUser6: SwapPipChannels(); return osEnd; case 
osUser7: SwapPipPosition(); return osEnd; case osUser8: DelPip(); PipChannelNr = 0; return osEnd; #endif default: Create(); break; } return state; } ////////////////////////////////////////////////////////////////////////////// // cDevice ////////////////////////////////////////////////////////////////////////////// class cSoftHdDevice:public cDevice { public: cSoftHdDevice(void); virtual ~ cSoftHdDevice(void); virtual bool HasDecoder(void) const; virtual bool CanReplay(void) const; virtual bool SetPlayMode(ePlayMode); #if APIVERSNUM >= 20103 virtual void TrickSpeed(int, bool); #else virtual void TrickSpeed(int); #endif virtual void Clear(void); virtual void Play(void); virtual void Freeze(void); virtual void Mute(void); virtual void StillPicture(const uchar *, int); virtual bool Poll(cPoller &, int = 0); virtual bool Flush(int = 0); virtual int64_t GetSTC(void); #if APIVERSNUM >= 10733 virtual cRect CanScaleVideo(const cRect &, int = taCenter); virtual void ScaleVideo(const cRect & = cRect::Null); #endif virtual void SetVideoDisplayFormat(eVideoDisplayFormat); virtual void SetVideoFormat(bool); virtual void GetVideoSize(int &, int &, double &); virtual void GetOsdSize(int &, int &, double &); virtual int PlayVideo(const uchar *, int); virtual int PlayAudio(const uchar *, int, uchar); #ifdef USE_TS_VIDEO virtual int PlayTsVideo(const uchar *, int); #endif #if !defined(USE_AUDIO_THREAD) || !defined(NO_TS_AUDIO) virtual int PlayTsAudio(const uchar *, int); #endif virtual void SetAudioChannelDevice(int); virtual int GetAudioChannelDevice(void); virtual void SetDigitalAudioDevice(bool); virtual void SetAudioTrackDevice(eTrackType); virtual void SetVolumeDevice(int); // Image Grab facilities virtual uchar *GrabImage(int &, bool, int, int, int); #ifdef USE_VDR_SPU // SPU facilities private: cDvbSpuDecoder * spuDecoder; public: virtual cSpuDecoder * GetSpuDecoder(void); #endif protected: virtual void MakePrimaryDevice(bool); }; /** ** Constructor device. 
*/
cSoftHdDevice::cSoftHdDevice(void)
{
    //dsyslog("[softhddev]%s\n", __FUNCTION__);

#ifdef USE_VDR_SPU
    spuDecoder = NULL;
#endif
}

/**
**	Destructor device.
*/
cSoftHdDevice::~cSoftHdDevice(void)
{
    //dsyslog("[softhddev]%s:\n", __FUNCTION__);
#ifdef USE_VDR_SPU
    delete spuDecoder;
#endif
}

/**
**	Informs a device that it will be the primary device.
**
**	Creates the OSD provider and syncs the detach/suspend state.
**
**	@param on	flag if becoming or losing primary
*/
void cSoftHdDevice::MakePrimaryDevice(bool on)
{
    dsyslog("[softhddev]%s: %d\n", __FUNCTION__, on);

    cDevice::MakePrimaryDevice(on);
    if (on) {
	new cSoftOsdProvider();

	if (SuspendMode == SUSPEND_DETACHED) {
	    Resume();
	    SuspendMode = NOT_SUSPENDED;
	}
    } else if (SuspendMode == NOT_SUSPENDED) {
	Suspend(1, 1, 0);
	SuspendMode = SUSPEND_DETACHED;
    }
}

#ifdef USE_VDR_SPU

/**
**	Get the device SPU decoder.
**
**	@returns a pointer to the device's SPU decoder (or NULL, if this
**	device doesn't have an SPU decoder)
*/
cSpuDecoder *cSoftHdDevice::GetSpuDecoder(void)
{
    dsyslog("[softhddev]%s:\n", __FUNCTION__);

    // lazily created, only on the primary device
    if (!spuDecoder && IsPrimaryDevice()) {
	spuDecoder = new cDvbSpuDecoder();
    }
    return spuDecoder;
}

#endif

/**
**	Tells whether this device has a MPEG decoder.
*/
bool cSoftHdDevice::HasDecoder(void) const
{
    return true;
}

/**
**	Returns true if this device can currently start a replay session.
*/
bool cSoftHdDevice::CanReplay(void) const
{
    return true;
}

/**
**	Sets the device into the given play mode.
**
**	@param play_mode	new play mode (Audio/Video/External...)
*/
bool cSoftHdDevice::SetPlayMode(ePlayMode play_mode)
{
    dsyslog("[softhddev]%s: %d\n", __FUNCTION__, play_mode);

    switch (play_mode) {
	case pmAudioVideo:
	    break;
	case pmAudioOnly:
	case pmAudioOnlyBlack:
	    break;
	case pmVideoOnly:
	    break;
	case pmNone:
	    break;
	case pmExtern_THIS_SHOULD_BE_AVOIDED:
	    dsyslog("[softhddev] play mode external\n");
	    // FIXME: what if already suspended?
	    Suspend(1, 1, 0);
	    SuspendMode = SUSPEND_EXTERNAL;
	    return true;
	default:
	    dsyslog("[softhddev] playmode not implemented... %d\n",
		play_mode);
	    break;
    }

    if (SuspendMode != NOT_SUSPENDED) {
	// only an external suspend may be ended by a play mode change
	if (SuspendMode != SUSPEND_EXTERNAL) {
	    return false;
	}
	Resume();
	SuspendMode = NOT_SUSPENDED;
    }

    return ::SetPlayMode(play_mode);
}

/**
**	Gets the current System Time Counter, which can be used to
**	synchronize audio, video and subtitles.
*/
int64_t cSoftHdDevice::GetSTC(void)
{
    //dsyslog("[softhddev]%s:\n", __FUNCTION__);

    return ::GetSTC();
}

/**
**	Set trick play speed.
**
**	Every single frame shall then be displayed the given number of
**	times.
**
**	@param speed	trick speed
**	@param forward	flag forward direction
*/
#if APIVERSNUM >= 20103
void cSoftHdDevice::TrickSpeed(int speed, bool forward)
{
    dsyslog("[softhddev]%s: %d %d\n", __FUNCTION__, speed, forward);

    ::TrickSpeed(speed);
}
#else
void cSoftHdDevice::TrickSpeed(int speed)
{
    dsyslog("[softhddev]%s: %d\n", __FUNCTION__, speed);

    ::TrickSpeed(speed);
}
#endif

/**
**	Clears all video and audio data from the device.
*/
void cSoftHdDevice::Clear(void)
{
    dsyslog("[softhddev]%s:\n", __FUNCTION__);

    cDevice::Clear();
    ::Clear();
}

/**
**	Sets the device into play mode (after a previous trick mode)
*/
void cSoftHdDevice::Play(void)
{
    dsyslog("[softhddev]%s:\n", __FUNCTION__);

    cDevice::Play();
    ::Play();
}

/**
**	Puts the device into "freeze frame" mode.
*/
void cSoftHdDevice::Freeze(void)
{
    dsyslog("[softhddev]%s:\n", __FUNCTION__);

    cDevice::Freeze();
    ::Freeze();
}

/**
**	Turns off audio while replaying.
*/
void cSoftHdDevice::Mute(void)
{
    dsyslog("[softhddev]%s:\n", __FUNCTION__);

    cDevice::Mute();
    ::Mute();
}

/**
**	Display the given I-frame as a still picture.
**
**	@param data	pes or ts data of a frame
**	@param length	length of data area
*/
void cSoftHdDevice::StillPicture(const uchar * data, int length)
{
    dsyslog("[softhddev]%s: %s %p %d\n", __FUNCTION__,
	data[0] == 0x47 ? "ts" : "pes", data, length);

    if (data[0] == 0x47) {		// ts sync: let the base class demux
	cDevice::StillPicture(data, length);
	return;
    }

    ::StillPicture(data, length);
}

/**
**	Check if the device is ready for further action.
**
**	@param poller		file handles (unused)
**	@param timeout_ms	timeout in ms to become ready
**
**	@retval true	if ready
**	@retval false	if busy
*/
bool cSoftHdDevice::Poll(
    __attribute__ ((unused)) cPoller & poller, int timeout_ms)
{
    //dsyslog("[softhddev]%s: %d\n", __FUNCTION__, timeout_ms);

    return ::Poll(timeout_ms);
}

/**
**	Flush the device output buffers.
**
**	@param timeout_ms	timeout in ms to become ready
*/
bool cSoftHdDevice::Flush(int timeout_ms)
{
    dsyslog("[softhddev]%s: %d ms\n", __FUNCTION__, timeout_ms);

    return ::Flush(timeout_ms);
}

// ----------------------------------------------------------------------------

/**
**	Sets the video display format to the given one (only useful if this
**	device has an MPEG decoder).
*/
void cSoftHdDevice::
SetVideoDisplayFormat(eVideoDisplayFormat video_display_format)
{
    dsyslog("[softhddev]%s: %d\n", __FUNCTION__, video_display_format);

    cDevice::SetVideoDisplayFormat(video_display_format);
#if 0
    static int last = -1;

    // called on every channel switch, no need to kill osd...
    if (last != video_display_format) {
	last = video_display_format;

	::VideoSetDisplayFormat(video_display_format);
	cSoftOsd::Dirty = 1;
    }
#endif
}

/**
**	Sets the output video format to either 16:9 or 4:3 (only useful
**	if this device has an MPEG decoder).
**
**	Should call SetVideoDisplayFormat.
**
**	@param video_format16_9	flag true 16:9.
*/
void cSoftHdDevice::SetVideoFormat(bool video_format16_9)
{
    dsyslog("[softhddev]%s: %d\n", __FUNCTION__, video_format16_9);

    // FIXME: 4:3 / 16:9 video format not supported.

    SetVideoDisplayFormat(eVideoDisplayFormat(Setup.VideoDisplayFormat));
}

/**
**	Returns the width, height and video_aspect ratio of the currently
**	displayed video material.
**
**	@note the video_aspect is used to scale the subtitle.
*/ void cSoftHdDevice::GetVideoSize(int &width, int &height, double &video_aspect) { ::GetVideoSize(&width, &height, &video_aspect); } /** ** Returns the width, height and pixel_aspect ratio the OSD. ** ** FIXME: Called every second, for nothing (no OSD displayed)? */ void cSoftHdDevice::GetOsdSize(int &width, int &height, double &pixel_aspect) { ::GetOsdSize(&width, &height, &pixel_aspect); } // ---------------------------------------------------------------------------- /** ** Play a audio packet. ** ** @param data exactly one complete PES packet (which is incomplete) ** @param length length of PES packet ** @param id type of audio data this packet holds */ int cSoftHdDevice::PlayAudio(const uchar * data, int length, uchar id) { //dsyslog("[softhddev]%s: %p %p %d %d\n", __FUNCTION__, this, data, length, id); return::PlayAudio(data, length, id); } void cSoftHdDevice::SetAudioTrackDevice( __attribute__ ((unused)) eTrackType type) { //dsyslog("[softhddev]%s:\n", __FUNCTION__); } void cSoftHdDevice::SetDigitalAudioDevice( __attribute__ ((unused)) bool on) { //dsyslog("[softhddev]%s: %s\n", __FUNCTION__, on ? "true" : "false"); } void cSoftHdDevice::SetAudioChannelDevice( __attribute__ ((unused)) int audio_channel) { //dsyslog("[softhddev]%s: %d\n", __FUNCTION__, audio_channel); } int cSoftHdDevice::GetAudioChannelDevice(void) { //dsyslog("[softhddev]%s:\n", __FUNCTION__); return 0; } /** ** Sets the audio volume on this device (Volume = 0...255). ** ** @param volume device volume */ void cSoftHdDevice::SetVolumeDevice(int volume) { dsyslog("[softhddev]%s: %d\n", __FUNCTION__, volume); ::SetVolumeDevice(volume); } // ---------------------------------------------------------------------------- /** ** Play a video packet. 
** ** @param data exactly one complete PES packet (which is incomplete) ** @param length length of PES packet */ int cSoftHdDevice::PlayVideo(const uchar * data, int length) { //dsyslog("[softhddev]%s: %p %d\n", __FUNCTION__, data, length); return::PlayVideo(data, length); } #ifdef USE_TS_VIDEO /** ** Play a TS video packet. ** ** @param data ts data buffer ** @param length ts packet length (188) */ int cSoftHdDevice::PlayTsVideo(const uchar * data, int length) { } #endif #if !defined(USE_AUDIO_THREAD) || !defined(NO_TS_AUDIO) /** ** Play a TS audio packet. ** ** @param data ts data buffer ** @param length ts packet length (188) */ int cSoftHdDevice::PlayTsAudio(const uchar * data, int length) { #ifndef NO_TS_AUDIO return::PlayTsAudio(data, length); #else AudioPoller(); return cDevice::PlayTsAudio(data, length); #endif } #endif /** ** Grabs the currently visible screen image. ** ** @param size size of the returned data ** @param jpeg flag true, create JPEG data ** @param quality JPEG quality ** @param width number of horizontal pixels in the frame ** @param height number of vertical pixels in the frame */ uchar *cSoftHdDevice::GrabImage(int &size, bool jpeg, int quality, int width, int height) { dsyslog("[softhddev]%s: %d, %d, %d, %dx%d\n", __FUNCTION__, size, jpeg, quality, width, height); if (SuspendMode != NOT_SUSPENDED) { return NULL; } if (quality < 0) { // caller should care, but fix it quality = 95; } return::GrabImage(&size, jpeg, quality, width, height); } #if APIVERSNUM >= 10733 /** ** Ask the output, if it can scale video. ** ** @param rect requested video window rectangle ** ** @returns the real rectangle or cRect:Null if invalid. */ cRect cSoftHdDevice::CanScaleVideo(const cRect & rect, __attribute__ ((unused)) int alignment) { return rect; } /** ** Scale the currently shown video. 
** ** @param rect video window rectangle */ void cSoftHdDevice::ScaleVideo(const cRect & rect) { #ifdef OSD_DEBUG dsyslog("[softhddev]%s: %dx%d%+d%+d\n", __FUNCTION__, rect.Width(), rect.Height(), rect.X(), rect.Y()); #endif ::ScaleVideo(rect.X(), rect.Y(), rect.Width(), rect.Height()); } #endif /** ** Call rgb to jpeg for C Plugin. */ extern "C" uint8_t * CreateJpeg(uint8_t * image, int *size, int quality, int width, int height) { return (uint8_t *) RgbToJpeg((uchar *) image, width, height, *size, quality); } ////////////////////////////////////////////////////////////////////////////// // cPlugin ////////////////////////////////////////////////////////////////////////////// class cPluginSoftHdDevice:public cPlugin { public: cPluginSoftHdDevice(void); virtual ~ cPluginSoftHdDevice(void); virtual const char *Version(void); virtual const char *Description(void); virtual const char *CommandLineHelp(void); virtual bool ProcessArgs(int, char *[]); virtual bool Initialize(void); virtual bool Start(void); virtual void Stop(void); virtual void Housekeeping(void); virtual void MainThreadHook(void); virtual const char *MainMenuEntry(void); virtual cOsdObject *MainMenuAction(void); virtual cMenuSetupPage *SetupMenu(void); virtual bool SetupParse(const char *, const char *); virtual bool Service(const char *, void * = NULL); virtual const char **SVDRPHelpPages(void); virtual cString SVDRPCommand(const char *, const char *, int &); }; /** ** Initialize any member variables here. ** ** @note DON'T DO ANYTHING ELSE THAT MAY HAVE SIDE EFFECTS, REQUIRE GLOBAL ** VDR OBJECTS TO EXIST OR PRODUCE ANY OUTPUT! */ cPluginSoftHdDevice::cPluginSoftHdDevice(void) { //dsyslog("[softhddev]%s:\n", __FUNCTION__); } /** ** Clean up after yourself! */ cPluginSoftHdDevice::~cPluginSoftHdDevice(void) { //dsyslog("[softhddev]%s:\n", __FUNCTION__); ::SoftHdDeviceExit(); // keep ConfigX11Display ... } /** ** Return plugin version number. ** ** @returns version number as constant string. 
*/ const char *cPluginSoftHdDevice::Version(void) { return VERSION; } /** ** Return plugin short description. ** ** @returns short description as constant string. */ const char *cPluginSoftHdDevice::Description(void) { return tr(DESCRIPTION); } /** ** Return a string that describes all known command line options. ** ** @returns command line help as constant string. */ const char *cPluginSoftHdDevice::CommandLineHelp(void) { return::CommandLineHelp(); } /** ** Process the command line arguments. */ bool cPluginSoftHdDevice::ProcessArgs(int argc, char *argv[]) { //dsyslog("[softhddev]%s:\n", __FUNCTION__); return::ProcessArgs(argc, argv); } /** ** Initializes the DVB devices. ** ** Must be called before accessing any DVB functions. ** ** @returns true if any devices are available. */ bool cPluginSoftHdDevice::Initialize(void) { //dsyslog("[softhddev]%s:\n", __FUNCTION__); MyDevice = new cSoftHdDevice(); return true; } /** ** Start any background activities the plugin shall perform. */ bool cPluginSoftHdDevice::Start(void) { //dsyslog("[softhddev]%s:\n", __FUNCTION__); if (!MyDevice->IsPrimaryDevice()) { isyslog("[softhddev] softhddevice %d is not the primary device!", MyDevice->DeviceNumber()); if (ConfigMakePrimary) { // Must be done in the main thread dsyslog("[softhddev] makeing softhddevice %d the primary device!", MyDevice->DeviceNumber()); DoMakePrimary = MyDevice->DeviceNumber() + 1; } } switch (::Start()) { case 1: //cControl::Launch(new cSoftHdControl); //cControl::Attach(); // FIXME: VDR overwrites the control SuspendMode = SUSPEND_NORMAL; break; case -1: SuspendMode = SUSPEND_DETACHED; break; case 0: default: break; } return true; } /** ** Shutdown plugin. Stop any background activities the plugin is ** performing. */ void cPluginSoftHdDevice::Stop(void) { //dsyslog("[softhddev]%s:\n", __FUNCTION__); ::Stop(); } /** ** Perform any cleanup or other regular tasks. 
*/ void cPluginSoftHdDevice::Housekeeping(void) { //dsyslog("[softhddev]%s:\n", __FUNCTION__); // check if user is inactive, automatic enter suspend mode // FIXME: cControl prevents shutdown, disable this until fixed if (0 && SuspendMode == NOT_SUSPENDED && ShutdownHandler.IsUserInactive()) { // don't overwrite already suspended suspend mode cControl::Launch(new cSoftHdControl); cControl::Attach(); Suspend(ConfigSuspendClose, ConfigSuspendClose, ConfigSuspendX11); SuspendMode = SUSPEND_NORMAL; } ::Housekeeping(); } /** ** Create main menu entry. */ const char *cPluginSoftHdDevice::MainMenuEntry(void) { //dsyslog("[softhddev]%s:\n", __FUNCTION__); return ConfigHideMainMenuEntry ? NULL : tr(MAINMENUENTRY); } /** ** Perform the action when selected from the main VDR menu. */ cOsdObject *cPluginSoftHdDevice::MainMenuAction(void) { //dsyslog("[softhddev]%s:\n", __FUNCTION__); return new cSoftHdMenu("SoftHdDevice"); } /** ** Called for every plugin once during every cycle of VDR's main program ** loop. */ void cPluginSoftHdDevice::MainThreadHook(void) { //dsyslog("[softhddev]%s:\n", __FUNCTION__); if (DoMakePrimary) { dsyslog("[softhddev]%s: switching primary device to %d\n", __FUNCTION__, DoMakePrimary); cDevice::SetPrimaryDevice(DoMakePrimary); DoMakePrimary = 0; } ::MainThreadHook(); } /** ** Return our setup menu. */ cMenuSetupPage *cPluginSoftHdDevice::SetupMenu(void) { //dsyslog("[softhddev]%s:\n", __FUNCTION__); return new cMenuSetupSoft; } /** ** Parse setup parameters ** ** @param name paramter name (case sensetive) ** @param value value as string ** ** @returns true if the parameter is supported. 
*/ bool cPluginSoftHdDevice::SetupParse(const char *name, const char *value) { int i; //dsyslog("[softhddev]%s: '%s' = '%s'\n", __FUNCTION__, name, value); if (!strcasecmp(name, "MakePrimary")) { ConfigMakePrimary = atoi(value); return true; } if (!strcasecmp(name, "HideMainMenuEntry")) { ConfigHideMainMenuEntry = atoi(value); return true; } if (!strcasecmp(name, "DetachFromMainMenu")) { ConfigDetachFromMainMenu = atoi(value); return true; } if (!strcasecmp(name, "Osd.Width")) { ConfigOsdWidth = atoi(value); VideoSetOsdSize(ConfigOsdWidth, ConfigOsdHeight); return true; } if (!strcasecmp(name, "Osd.Height")) { ConfigOsdHeight = atoi(value); VideoSetOsdSize(ConfigOsdWidth, ConfigOsdHeight); return true; } if (!strcasecmp(name, "Suspend.Close")) { ConfigSuspendClose = atoi(value); return true; } if (!strcasecmp(name, "Suspend.X11")) { ConfigSuspendX11 = atoi(value); return true; } if (!strcasecmp(name, "Video4to3DisplayFormat")) { Config4to3DisplayFormat = atoi(value); VideoSet4to3DisplayFormat(Config4to3DisplayFormat); return true; } if (!strcasecmp(name, "VideoOtherDisplayFormat")) { ConfigOtherDisplayFormat = atoi(value); VideoSetOtherDisplayFormat(ConfigOtherDisplayFormat); return true; } if (!strcasecmp(name, "Background")) { VideoSetBackground(ConfigVideoBackground = strtoul(value, NULL, 0)); return true; } if (!strcasecmp(name, "StudioLevels")) { VideoSetStudioLevels(ConfigVideoStudioLevels = atoi(value)); return true; } if (!strcasecmp(name, "60HzMode")) { VideoSet60HzMode(ConfigVideo60HzMode = atoi(value)); return true; } if (!strcasecmp(name, "SoftStartSync")) { VideoSetSoftStartSync(ConfigVideoSoftStartSync = atoi(value)); return true; } if (!strcasecmp(name, "BlackPicture")) { VideoSetBlackPicture(ConfigVideoBlackPicture = atoi(value)); return true; } if (!strcasecmp(name, "ClearOnSwitch")) { ConfigVideoClearOnSwitch = atoi(value); return true; } if (!strcasecmp(name, "Brightness")) { VideoSetBrightness(ConfigVideoBrightness = atoi(value)); return true; } 
if (!strcasecmp(name, "Contrast")) { VideoSetContrast(ConfigVideoContrast = atoi(value)); return true; } if (!strcasecmp(name, "Saturation")) { VideoSetSaturation(ConfigVideoSaturation = atoi(value)); return true; } if (!strcasecmp(name, "Hue")) { VideoSetHue(ConfigVideoHue = atoi(value)); return true; } for (i = 0; i < RESOLUTIONS; ++i) { char buf[128]; snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "Scaling"); if (!strcasecmp(name, buf)) { ConfigVideoScaling[i] = atoi(value); VideoSetScaling(ConfigVideoScaling); return true; } snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "Deinterlace"); if (!strcasecmp(name, buf)) { ConfigVideoDeinterlace[i] = atoi(value); VideoSetDeinterlace(ConfigVideoDeinterlace); return true; } snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "SkipChromaDeinterlace"); if (!strcasecmp(name, buf)) { ConfigVideoSkipChromaDeinterlace[i] = atoi(value); VideoSetSkipChromaDeinterlace(ConfigVideoSkipChromaDeinterlace); return true; } snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "InverseTelecine"); if (!strcasecmp(name, buf)) { ConfigVideoInverseTelecine[i] = atoi(value); VideoSetInverseTelecine(ConfigVideoInverseTelecine); return true; } snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "Denoise"); if (!strcasecmp(name, buf)) { ConfigVideoDenoise[i] = atoi(value); VideoSetDenoise(ConfigVideoDenoise); return true; } snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "Sharpen"); if (!strcasecmp(name, buf)) { ConfigVideoSharpen[i] = atoi(value); VideoSetSharpen(ConfigVideoSharpen); return true; } snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "CutTopBottom"); if (!strcasecmp(name, buf)) { ConfigVideoCutTopBottom[i] = atoi(value); VideoSetCutTopBottom(ConfigVideoCutTopBottom); return true; } snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "CutLeftRight"); if (!strcasecmp(name, buf)) { ConfigVideoCutLeftRight[i] = atoi(value); VideoSetCutLeftRight(ConfigVideoCutLeftRight); return true; } } if (!strcasecmp(name, 
"AutoCrop.Interval")) { VideoSetAutoCrop(ConfigAutoCropInterval = atoi(value), ConfigAutoCropDelay, ConfigAutoCropTolerance); ConfigAutoCropEnabled = ConfigAutoCropInterval != 0; return true; } if (!strcasecmp(name, "AutoCrop.Delay")) { VideoSetAutoCrop(ConfigAutoCropInterval, ConfigAutoCropDelay = atoi(value), ConfigAutoCropTolerance); return true; } if (!strcasecmp(name, "AutoCrop.Tolerance")) { VideoSetAutoCrop(ConfigAutoCropInterval, ConfigAutoCropDelay, ConfigAutoCropTolerance = atoi(value)); return true; } if (!strcasecmp(name, "AudioDelay")) { VideoSetAudioDelay(ConfigVideoAudioDelay = atoi(value)); return true; } if (!strcasecmp(name, "AudioDrift")) { CodecSetAudioDrift(ConfigAudioDrift = atoi(value)); return true; } if (!strcasecmp(name, "AudioPassthrough")) { int i; i = atoi(value); AudioPassthroughState = i > 0; ConfigAudioPassthrough = abs(i); if (AudioPassthroughState) { CodecSetAudioPassthrough(ConfigAudioPassthrough); } else { CodecSetAudioPassthrough(0); } return true; } if (!strcasecmp(name, "AudioDownmix")) { CodecSetAudioDownmix(ConfigAudioDownmix = atoi(value)); return true; } if (!strcasecmp(name, "AudioSoftvol")) { AudioSetSoftvol(ConfigAudioSoftvol = atoi(value)); return true; } if (!strcasecmp(name, "AudioNormalize")) { ConfigAudioNormalize = atoi(value); AudioSetNormalize(ConfigAudioNormalize, ConfigAudioMaxNormalize); return true; } if (!strcasecmp(name, "AudioMaxNormalize")) { ConfigAudioMaxNormalize = atoi(value); AudioSetNormalize(ConfigAudioNormalize, ConfigAudioMaxNormalize); return true; } if (!strcasecmp(name, "AudioCompression")) { ConfigAudioCompression = atoi(value); AudioSetCompression(ConfigAudioCompression, ConfigAudioMaxCompression); return true; } if (!strcasecmp(name, "AudioMaxCompression")) { ConfigAudioMaxCompression = atoi(value); AudioSetCompression(ConfigAudioCompression, ConfigAudioMaxCompression); return true; } if (!strcasecmp(name, "AudioStereoDescent")) { ConfigAudioStereoDescent = atoi(value); 
AudioSetStereoDescent(ConfigAudioStereoDescent); return true; } if (!strcasecmp(name, "AudioBufferTime")) { ConfigAudioBufferTime = atoi(value); return true; } if (!strcasecmp(name, "AudioAutoAES")) { ConfigAudioAutoAES = atoi(value); AudioSetAutoAES(ConfigAudioAutoAES); return true; } #ifdef USE_PIP if (!strcasecmp(name, "pip.X")) { ConfigPipX = atoi(value); return true; } if (!strcasecmp(name, "pip.Y")) { ConfigPipY = atoi(value); return true; } if (!strcasecmp(name, "pip.Width")) { ConfigPipWidth = atoi(value); return true; } if (!strcasecmp(name, "pip.Height")) { ConfigPipHeight = atoi(value); return true; } if (!strcasecmp(name, "pip.VideoX")) { ConfigPipVideoX = atoi(value); return true; } if (!strcasecmp(name, "pip.VideoY")) { ConfigPipVideoY = atoi(value); return true; } if (!strcasecmp(name, "pip.VideoWidth")) { ConfigPipVideoWidth = atoi(value); return true; } if (!strcasecmp(name, "pip.VideoHeight")) { ConfigPipVideoHeight = atoi(value); return true; } if (!strcasecmp(name, "pip.Alt.X")) { ConfigPipAltX = atoi(value); return true; } if (!strcasecmp(name, "pip.Alt.Y")) { ConfigPipAltY = atoi(value); return true; } if (!strcasecmp(name, "pip.Alt.Width")) { ConfigPipAltWidth = atoi(value); return true; } if (!strcasecmp(name, "pip.Alt.Height")) { ConfigPipAltHeight = atoi(value); return true; } if (!strcasecmp(name, "pip.Alt.VideoX")) { ConfigPipAltVideoX = atoi(value); return true; } if (!strcasecmp(name, "pip.Alt.VideoY")) { ConfigPipAltVideoY = atoi(value); return true; } if (!strcasecmp(name, "pip.Alt.VideoWidth")) { ConfigPipAltVideoWidth = atoi(value); return true; } if (!strcasecmp(name, "pip.Alt.VideoHeight")) { ConfigPipAltVideoHeight = atoi(value); return true; } #endif #ifdef USE_SCREENSAVER if (!strcasecmp(name, "EnableDPMSatBlackScreen")) { ConfigEnableDPMSatBlackScreen = atoi(value); SetDPMSatBlackScreen(ConfigEnableDPMSatBlackScreen); return true; } #endif return false; } /** ** Receive requests or messages. 
** ** @param id unique identification string that identifies the ** service protocol ** @param data custom data structure */ bool cPluginSoftHdDevice::Service(const char *id, void *data) { //dsyslog("[softhddev]%s: id %s\n", __FUNCTION__, id); if (strcmp(id, OSD_3DMODE_SERVICE) == 0) { SoftHDDevice_Osd3DModeService_v1_0_t *r; r = (SoftHDDevice_Osd3DModeService_v1_0_t *) data; VideoSetOsd3DMode(r->Mode); return true; } if (strcmp(id, ATMO_GRAB_SERVICE) == 0) { int width; int height; if (data == NULL) { return true; } if (SuspendMode != NOT_SUSPENDED) { return false; } SoftHDDevice_AtmoGrabService_v1_0_t *r = (SoftHDDevice_AtmoGrabService_v1_0_t *) data; if (r->structSize != sizeof(SoftHDDevice_AtmoGrabService_v1_0_t) || r->analyseSize < 64 || r->analyseSize > 256 || r->clippedOverscan < 0 || r->clippedOverscan > 200) { return false; } width = r->analyseSize * -1; // Internal marker for Atmo grab service height = r->clippedOverscan; r->img = VideoGrabService(&r->imgSize, &width, &height); if (r->img == NULL) { return false; } r->imgType = GRAB_IMG_RGBA_FORMAT_B8G8R8A8; r->width = width; r->height = height; return true; } if (strcmp(id, ATMO1_GRAB_SERVICE) == 0) { SoftHDDevice_AtmoGrabService_v1_1_t *r; if (!data) { return true; } if (SuspendMode != NOT_SUSPENDED) { return false; } r = (SoftHDDevice_AtmoGrabService_v1_1_t *) data; r->img = VideoGrabService(&r->size, &r->width, &r->height); if (!r->img) { return false; } return true; } return false; } //---------------------------------------------------------------------------- // cPlugin SVDRP //---------------------------------------------------------------------------- /** ** SVDRP commands help text. ** FIXME: translation? */ static const char *SVDRPHelpText[] = { "SUSP\n" "\040 Suspend plugin.\n\n" " The plugin is suspended to save energie. 
Depending on the setup\n" " 'softhddevice.Suspend.Close = 0' only the video and audio output\n" " is stopped or with 'softhddevice.Suspend.Close = 1' the video\n" " and audio devices are closed.\n" " If 'softhddevice.Suspend.X11 = 1' is set and the X11 server was\n" " started by the plugin, the X11 server would also be closed.\n" " (Stopping X11 while suspended isn't supported yet)\n", "RESU\n" "\040 Resume plugin.\n\n" " Resume the suspended plugin. The plugin could be suspended by\n" " the command line option '-s' or by a previous SUSP command.\n" " If the x11 server was stopped by the plugin, it will be\n" " restarted.", "DETA\n" "\040 Detach plugin.\n\n" " The plugin will be detached from the audio, video and DVB\n" " devices. Other programs or plugins can use them now.\n", "ATTA <-d display> <-a audio> <-p pass>\n" " Attach plugin.\n\n" " Attach the plugin to audio, video and DVB devices. Use:\n" " -d display\tdisplay of x11 server (fe. :0.0)\n" " -a audio\taudio device (fe. alsa: hw:0,0 oss: /dev/dsp)\n" " -p pass\t\taudio device for pass-through (hw:0,1 or /dev/dsp1)\n", "PRIM \n" " Make the primary device.\n\n" " is the number of device. Without number softhddevice becomes\n" " the primary device. If becoming primary, the plugin is attached\n" " to the devices. 
If loosing primary, the plugin is detached from\n" " the devices.", "HOTK key\n" " Execute hotkey.\n\n" " key is the hotkey number, following are supported:\n" " 10: disable audio pass-through\n" " 11: enable audio pass-through\n" " 12: toggle audio pass-through\n" " 13: decrease audio delay by 10ms\n" " 14: increase audio delay by 10ms\n" " 15: toggle ac3 mixdown\n" " 20: disable fullscreen\n\040 21: enable fullscreen\n" " 22: toggle fullscreen\n" " 23: disable auto-crop\n\040 24: enable auto-crop\n" " 25: toggle auto-crop\n" " 30: stretch 4:3 to display\n\040 31: pillar box 4:3 in display\n" " 32: center cut-out 4:3 to display\n" " 39: rotate 4:3 to display zoom mode\n" " 40: stretch other aspect ratios to display\n" " 41: letter box other aspect ratios in display\n" " 42: center cut-out other aspect ratios to display\n" " 49: rotate other aspect ratios to display zoom mode\n", "STAT\n" "\040 Display SuspendMode of the plugin.\n\n" " reply code is 910 + SuspendMode\n" " SUSPEND_EXTERNAL == -1 (909)\n" " NOT_SUSPENDED == 0 (910)\n" " SUSPEND_NORMAL == 1 (911)\n" " SUSPEND_DETACHED == 2 (912)\n", "3DOF\n" "\040 3D OSD off.\n", "3DTB\n" "\040 3D OSD Top and Bottom.\n", "3DSB\n" "\040 3D OSD Side by Side.\n", "RAIS\n" "\040 Raise softhddevice window\n\n" " If Xserver is not started by softhddevice, the window which\n" " contains the softhddevice frontend will be raised to the front.\n", NULL }; /** ** Return SVDRP commands help pages. ** ** return a pointer to a list of help strings for all of the plugin's ** SVDRP commands. */ const char **cPluginSoftHdDevice::SVDRPHelpPages(void) { return SVDRPHelpText; } /** ** Handle SVDRP commands. 
** ** @param command SVDRP command ** @param option all command arguments ** @param reply_code reply code */ cString cPluginSoftHdDevice::SVDRPCommand(const char *command, const char *option, __attribute__ ((unused)) int &reply_code) { if (!strcasecmp(command, "STAT")) { reply_code = 910 + SuspendMode; switch (SuspendMode) { case SUSPEND_EXTERNAL: return "SuspendMode is SUSPEND_EXTERNAL"; case NOT_SUSPENDED: return "SuspendMode is NOT_SUSPENDED"; case SUSPEND_NORMAL: return "SuspendMode is SUSPEND_NORMAL"; case SUSPEND_DETACHED: return "SuspendMode is SUSPEND_DETACHED"; } } if (!strcasecmp(command, "SUSP")) { if (cSoftHdControl::Player) { // already suspended return "SoftHdDevice already suspended"; } if (SuspendMode != NOT_SUSPENDED) { return "SoftHdDevice already detached"; } cControl::Launch(new cSoftHdControl); cControl::Attach(); Suspend(ConfigSuspendClose, ConfigSuspendClose, ConfigSuspendX11); SuspendMode = SUSPEND_NORMAL; return "SoftHdDevice is suspended"; } if (!strcasecmp(command, "RESU")) { if (SuspendMode == NOT_SUSPENDED) { return "SoftHdDevice already resumed"; } if (SuspendMode != SUSPEND_NORMAL) { return "can't resume SoftHdDevice"; } if (ShutdownHandler.GetUserInactiveTime()) { ShutdownHandler.SetUserInactiveTimeout(); } if (cSoftHdControl::Player) { // suspended cControl::Shutdown(); // not need, if not suspended } Resume(); SuspendMode = NOT_SUSPENDED; return "SoftHdDevice is resumed"; } if (!strcasecmp(command, "DETA")) { if (SuspendMode == SUSPEND_DETACHED) { return "SoftHdDevice already detached"; } if (cSoftHdControl::Player) { // already suspended return "can't suspend SoftHdDevice already suspended"; } cControl::Launch(new cSoftHdControl); cControl::Attach(); Suspend(1, 1, 0); SuspendMode = SUSPEND_DETACHED; return "SoftHdDevice is detached"; } if (!strcasecmp(command, "ATTA")) { char *tmp; char *t; char *s; char *o; if (SuspendMode != SUSPEND_DETACHED) { return "can't attach SoftHdDevice not detached"; } if (!(tmp = strdup(option))) { 
return "out of memory"; } t = tmp; while ((s = strsep(&t, " \t\n\r"))) { if (!strcmp(s, "-d")) { if (!(o = strsep(&t, " \t\n\r"))) { free(tmp); return "missing option argument"; } free(ConfigX11Display); ConfigX11Display = strdup(o); X11DisplayName = ConfigX11Display; } else if (!strncmp(s, "-d", 2)) { free(ConfigX11Display); ConfigX11Display = strdup(s + 2); X11DisplayName = ConfigX11Display; } else if (!strcmp(s, "-a")) { if (!(o = strsep(&t, " \t\n\r"))) { free(tmp); return "missing option argument"; } free(ConfigAudioDevice); ConfigAudioDevice = strdup(o); AudioSetDevice(ConfigAudioDevice); } else if (!strncmp(s, "-a", 2)) { free(ConfigAudioDevice); ConfigAudioDevice = strdup(s + 2); AudioSetDevice(ConfigAudioDevice); } else if (!strcmp(s, "-p")) { if (!(o = strsep(&t, " \t\n\r"))) { free(tmp); return "missing option argument"; } free(ConfigPassthroughDevice); ConfigPassthroughDevice = strdup(o); AudioSetPassthroughDevice(ConfigPassthroughDevice); } else if (!strncmp(s, "-p", 2)) { free(ConfigPassthroughDevice); ConfigPassthroughDevice = strdup(s + 2); AudioSetPassthroughDevice(ConfigPassthroughDevice); } else if (*s) { free(tmp); return "unsupported option"; } } free(tmp); if (ShutdownHandler.GetUserInactiveTime()) { ShutdownHandler.SetUserInactiveTimeout(); } if (cSoftHdControl::Player) { // suspended cControl::Shutdown(); // not need, if not suspended } Resume(); SuspendMode = NOT_SUSPENDED; return "SoftHdDevice is attached"; } if (!strcasecmp(command, "HOTK")) { int hotk; hotk = strtol(option, NULL, 0); HandleHotkey(hotk); return "hot-key executed"; } if (!strcasecmp(command, "PRIM")) { int primary; primary = strtol(option, NULL, 0); if (!primary && MyDevice) { primary = MyDevice->DeviceNumber() + 1; } dsyslog("[softhddev] switching primary device to %d\n", primary); DoMakePrimary = primary; return "switching primary device requested"; } if (!strcasecmp(command, "3DOF")) { VideoSetOsd3DMode(0); return "3d off"; } if (!strcasecmp(command, "3DSB")) { 
VideoSetOsd3DMode(1); return "3d sbs"; } if (!strcasecmp(command, "3DTB")) { VideoSetOsd3DMode(2); return "3d tb"; } if (!strcasecmp(command, "RAIS")) { if (!ConfigStartX11Server) { VideoRaiseWindow(); } else { return "Raise not possible"; } return "Window raised"; } return NULL; } VDRPLUGINCREATOR(cPluginSoftHdDevice); // Don't touch this! vdr-plugin-softhddevice/AGPL-3.0.txt0000644000175000017500000010333412644034136016727 0ustar tobiastobias GNU AFFERO GENERAL PUBLIC LICENSE Version 3, 19 November 2007 Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU Affero General Public License is a free, copyleft license for software and other kinds of works, specifically designed to ensure cooperation with the community in the case of network server software. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, our General Public Licenses are intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. Developers that use our General Public Licenses protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License which gives you legal permission to copy, distribute and/or modify the software. 
A secondary benefit of defending all users' freedom is that improvements made in alternate versions of the program, if they receive widespread use, become available for other developers to incorporate. Many developers of free software are heartened and encouraged by the resulting cooperation. However, in the case of software used on network servers, this result may fail to come about. The GNU General Public License permits making a modified version and letting the public access it on a server without ever releasing its source code to the public. The GNU Affero General Public License is designed specifically to ensure that, in such cases, the modified source code becomes available to the community. It requires the operator of a network server to provide the source code of the modified version running there to the users of that server. Therefore, public use of a modified version, on a publicly accessible server, gives the public access to the source code of the modified version. An older license, called the Affero General Public License and published by Affero, was designed to accomplish similar goals. This is a different license, not a version of the Affero GPL, but Affero has released a new version of the Affero GPL which permits relicensing under this license. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU Affero General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. 
The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. 
The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. 
The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. 
You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. 
A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. 
d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. 
A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. 
Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. 
Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. 
If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. 
Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. 
The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. 
"Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. 
If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Remote Network Interaction; Use with the GNU General Public License. Notwithstanding any other provision of this License, if you modify the Program, your modified version must prominently offer all users interacting with it remotely through a computer network (if your version supports such interaction) an opportunity to receive the Corresponding Source of your version by providing access to the Corresponding Source from a network server at no charge, through some standard or customary means of facilitating copying of software. This Corresponding Source shall include the Corresponding Source for any work covered by version 3 of the GNU General Public License that is incorporated pursuant to the following paragraph. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the work with which it is combined will remain governed by version 3 of the GNU General Public License. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU Affero General Public License from time to time. 
Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU Affero General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU Affero General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU Affero General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. 
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with this program. If not, see . Also add information on how to contact you by electronic and paper mail. If your software can interact with users remotely through a computer network, you should also make sure that it provides a way for users to get its source. For example, if your program is a web application, its interface could display a "Source" link that leads users to an archive of the code. There are many ways you could offer source, and different solutions will be better for different programs; see section 13 for the specific requirements. You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU AGPL, see . vdr-plugin-softhddevice/softhddevice.h0000644000175000017500000000145412644034136017765 0ustar tobiastobias/// /// @file softhddevice.h @brief software HD device plugin header file. /// /// Copyright (c) 2011, 2014 by Johns. All Rights Reserved. /// /// Contributor(s): /// /// License: AGPLv3 /// /// This program is free software: you can redistribute it and/or modify /// it under the terms of the GNU Affero General Public License as /// published by the Free Software Foundation, either version 3 of the /// License. /// /// This program is distributed in the hope that it will be useful, /// but WITHOUT ANY WARRANTY; without even the implied warranty of /// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the /// GNU Affero General Public License for more details. 
/// /// $Id: 3bc44809950ec019529638cad9223a85d4ab5576 $ ////////////////////////////////////////////////////////////////////////////// vdr-plugin-softhddevice/vdr-softhddevice-9999.ebuild0000644000175000017500000000326412644034136022215 0ustar tobiastobias# Copyright 1999-2014 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 # $Header: $ EAPI="5" inherit vdr-plugin-2 git-2 RESTRICT="test" EGIT_REPO_URI="git://projects.vdr-developer.org/vdr-plugin-softhddevice.git" KEYWORDS="" DESCRIPTION="VDR Plugin: Software and GPU emulated HD output device" HOMEPAGE="http://projects.vdr-developer.org/projects/show/plg-softhddevice" LICENSE="AGPL-3" SLOT="0" IUSE="alsa +debug opengl oss vaapi vdpau xscreensaver" RDEPEND=">=media-video/vdr-2 x11-libs/libX11 >=x11-libs/libxcb-1.8 x11-libs/xcb-util-wm x11-libs/xcb-util-keysyms x11-libs/xcb-util-renderutil alsa? ( media-libs/alsa-lib ) opengl? ( virtual/opengl ) vaapi? ( x11-libs/libva virtual/ffmpeg[vaapi] ) vdpau? ( x11-libs/libvdpau virtual/ffmpeg[vdpau] )" DEPEND="${RDEPEND} virtual/pkgconfig x11-libs/xcb-util" REQUIRED_USE="opengl? 
( vaapi ) || ( vaapi vdpau ) || ( alsa oss )" #VDR_CONFD_FILE="${FILESDIR}/confd-0.6.0" #VDR_RCADDON_FILE="${FILESDIR}/rc-addon-0.6.0.sh" pkg_setup() { vdr-plugin-2_pkg_setup append-cppflags -DHAVE_PTHREAD_NAME use debug && append-cppflags -DDEBUG -DOSD_DEBUG } src_prepare() { vdr-plugin-2_src_prepare BUILD_PARAMS+=" ALSA=$(usex alsa 1 0)" BUILD_PARAMS+=" OPENGL=$(usex opengl 1 0)" BUILD_PARAMS+=" OSS=$(usex oss 1 0)" BUILD_PARAMS+=" VAAPI=$(usex vaapi 1 0)" BUILD_PARAMS+=" VDPAU=$(usex vdpau 1 0)" BUILD_PARAMS+=" SCREENSAVER=$(usex xscreensaver 1 0)" if has_version ">=media-video/ffmpeg-0.8"; then BUILD_PARAMS+=" SWRESAMPLE=1" fi if has_version ">=media-video/libav-0.8"; then BUILD_PARAMS+=" AVRESAMPLE=1" fi } src_install() { vdr-plugin-2_src_install nonfatal dodoc ChangeLog Todo } vdr-plugin-softhddevice/softhddevice_service.h0000644000175000017500000000301012644034136021473 0ustar tobiastobias/// /// @file softhddev_service.h @brief software HD device service header file. /// /// Copyright (c) 2012 by durchflieger. All Rights Reserved. /// /// Contributor(s): /// /// License: AGPLv3 /// /// This program is free software: you can redistribute it and/or modify /// it under the terms of the GNU Affero General Public License as /// published by the Free Software Foundation, either version 3 of the /// License. /// /// This program is distributed in the hope that it will be useful, /// but WITHOUT ANY WARRANTY; without even the implied warranty of /// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the /// GNU Affero General Public License for more details. 
/// /// $Id: c7c2d5e8b724515d7c767668aab717b27f4e4068 $ ////////////////////////////////////////////////////////////////////////////// #pragma once #define ATMO_GRAB_SERVICE "SoftHDDevice-AtmoGrabService-v1.0" #define ATMO1_GRAB_SERVICE "SoftHDDevice-AtmoGrabService-v1.1" #define OSD_3DMODE_SERVICE "SoftHDDevice-Osd3DModeService-v1.0" enum { GRAB_IMG_RGBA_FORMAT_B8G8R8A8 }; typedef struct { int structSize; // request data int analyseSize; int clippedOverscan; // reply data int imgType; int imgSize; int width; int height; void *img; } SoftHDDevice_AtmoGrabService_v1_0_t; typedef struct { int Mode; } SoftHDDevice_Osd3DModeService_v1_0_t; typedef struct { // request/reply data int width; int height; // reply data int size; void *img; } SoftHDDevice_AtmoGrabService_v1_1_t; vdr-plugin-softhddevice/po/0000755000175000017500000000000012644034136015557 5ustar tobiastobiasvdr-plugin-softhddevice/po/.gitignore0000644000175000017500000000003712644034136017547 0ustar tobiastobias# gitignore(5) file *.mo *.pot vdr-plugin-softhddevice/po/de_DE.po0000644000175000017500000006442012644034136017065 0ustar tobiastobias# SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the PACKAGE package. # FIRST AUTHOR , YEAR. # msgid "" msgstr "" "Project-Id-Version: VDR \n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2015-10-20 14:39+0200\n" "PO-Revision-Date: blabla\n" "Last-Translator: blabla\n" "Language-Team: blabla\n" "Language: german\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=utf-8\n" "Content-Transfer-Encoding: 8bit\n" #, c-format msgid "audio: %dHz sample-rate unsupported\n" msgstr "" #, c-format msgid "audio: %d channels unsupported\n" msgstr "" msgid "audio: out of ring buffers\n" msgstr "" #, c-format msgid "audio/alsa: avail underrun error? 
'%s'\n" msgstr "" #, c-format msgid "audio/alsa: snd_pcm_avail_update(): %s\n" msgstr "" #, c-format msgid "audio/alsa: broken driver %d state '%s'\n" msgstr "" #, c-format msgid "audio/alsa: snd_pcm_start(): %s\n" msgstr "" msgid "audio/alsa: bytes lost -> out of sync\n" msgstr "" #, c-format msgid "audio/alsa: writei underrun error? '%s'\n" msgstr "" #, c-format msgid "audio/alsa: snd_pcm_writei failed: %s\n" msgstr "" msgid "audio/alsa: not all frames written\n" msgstr "" #, c-format msgid "audio: snd_pcm_drop(): %s\n" msgstr "" #, c-format msgid "audio: snd_pcm_prepare(): %s\n" msgstr "" #, c-format msgid "audio/alsa: wait underrun error? '%s'\n" msgstr "" #, c-format msgid "audio/alsa: snd_pcm_wait(): %s\n" msgstr "" #, c-format msgid "audio/alsa: using %sdevice '%s'\n" msgstr "" #, c-format msgid "audio/alsa: playback open '%s' error: %s\n" msgstr "" #, c-format msgid "audio/alsa: can't set block mode: %s\n" msgstr "" #, c-format msgid "audio: snd_pcm_hw_params_any: no configurations available: %s\n" msgstr "" #, c-format msgid "audio/alsa: supports pause: %s\n" msgstr "" #, c-format msgid "audio/alsa: can't open mixer '%s'\n" msgstr "" #, c-format msgid "audio/alsa: set params error: %s\n" msgstr "" #, c-format msgid "audio: snd_pcm_sw_params_current failed: %s\n" msgstr "" #, c-format msgid "audio: snd_pcm_sw_params_get_boundary failed: %s\n" msgstr "" #, c-format msgid "audio: snd_pcm_sw_params_set_silence_size failed: %s\n" msgstr "" #, c-format msgid "audio: snd_pcm_sw_params failed: %s\n" msgstr "" #, c-format msgid "audio/alsa: start delay %ums\n" msgstr "" #, c-format msgid "audio/alsa: snd_pcm_pause(): %s\n" msgstr "" #, c-format msgid "audio/alsa: snd_pcm_prepare(): %s\n" msgstr "" msgid "audio/alsa: still paused\n" msgstr "" #, c-format msgid "snd_pcm_pause(): %s\n" msgstr "" #, c-format msgid "snd_pcm_drop(): %s\n" msgstr "" #, c-format msgid "audio/oss: ioctl(SNDCTL_DSP_GETOSPACE): %s\n" msgstr "" #, c-format msgid "audio/oss: write error: %s\n" 
msgstr "" msgid "audio/oss: error not all bytes written\n" msgstr "" #, c-format msgid "audio/oss: ioctl(SNDCTL_DSP_HALT_OUTPUT): %s\n" msgstr "" #, c-format msgid "audio/oss: error poll %s\n" msgstr "" #, c-format msgid "audio/oss: using %sdevice '%s'\n" msgstr "" #, c-format msgid "audio/oss: can't open dsp device '%s': %s\n" msgstr "" #, c-format msgid "audio/oss: ioctl(MIXER_WRITE): %s\n" msgstr "" #, c-format msgid "audio/oss: can't open mixer device '%s': %s\n" msgstr "" #, c-format msgid "audio/oss: ioctl(SOUND_MIXER_READ_DEVMASK): %s\n" msgstr "" #, c-format msgid "audio/oss: channel '%s' not supported\n" msgstr "" #, c-format msgid "audio/oss: channel '%s' not found\n" msgstr "" msgid "audio/oss: should not happen\n" msgstr "" #, c-format msgid "audio/oss: ioctl(SNDCTL_DSP_GETODELAY): %s\n" msgstr "" #, c-format msgid "audio/oss: ioctl(SNDCTL_DSP_SETFMT): %s\n" msgstr "" msgid "audio/oss: device doesn't support 16 bit sample format.\n" msgstr "" #, c-format msgid "audio/oss: ioctl(SNDCTL_DSP_CHANNELS): %s\n" msgstr "" #, c-format msgid "audio/oss: device doesn't support %d channels.\n" msgstr "" #, c-format msgid "audio/oss: ioctl(SNDCTL_DSP_SPEED): %s\n" msgstr "" #, c-format msgid "audio/oss: device doesn't support %dHz sample rate.\n" msgstr "" #, c-format msgid "audio/oss: ioctl(SNDCTL_DSP_POLICY): %s\n" msgstr "" #, c-format msgid "audio/oss: delay %ums\n" msgstr "" #, c-format msgid "audio: can't set channels %d sample-rate %dHz\n" msgstr "" msgid "audio: can't cancel play thread\n" msgstr "" #, c-format msgid "audio: can't place %d samples in ring buffer\n" msgstr "" msgid "audio: flush out of ring buffers\n" msgstr "" #, c-format msgid "audio: '%s' output module used\n" msgstr "" #, c-format msgid "audio: '%s' output module isn't supported\n" msgstr "" #, c-format msgid "audio: %6dHz supports %d %d %d %d %d %d %d %d channels\n" msgstr "" msgid "codec: buggy libav, use ffmpeg\n" msgstr "" msgid "codec: can't allocate vodeo decoder\n" msgstr "" msgid 
"codec: missing close\n" msgstr "" #, c-format msgid "codec: codec ID %#06x not found\n" msgstr "" msgid "codec: can't allocate video codec context\n" msgstr "" msgid "codec: can't open video codec!\n" msgstr "" msgid "codec: can't allocate video decoder frame buffer\n" msgstr "" msgid "codec: can't allocate audio decoder\n" msgstr "" msgid "codec: can't allocate audio decoder frame buffer\n" msgstr "" msgid "codec: can't allocate audio codec context\n" msgstr "" msgid "codec: can't open audio codec\n" msgstr "" msgid "codec/audio: decoded data smaller than encoded\n" msgstr "" msgid "codec/audio: resample setup error\n" msgstr "" msgid "codec/audio: overwrite resample\n" msgstr "" msgid "codec/audio: AvResample setup error\n" msgstr "" msgid "codec: latm\n" msgstr "" msgid "codec: error audio data\n" msgstr "" msgid "codec: error more than one frame data\n" msgstr "" msgid "codec/audio: can't setup resample\n" msgstr "" msgid "codec/audio: can't open resample\n" msgstr "" msgid "codec/audio: latm\n" msgstr "" msgid "codec/audio: bad audio frame\n" msgstr "" msgid "codec/audio: error more than one frame data\n" msgstr "" msgid "codec/audio: no frame\n" msgstr "" msgid "pesdemux: out of memory\n" msgstr "" msgid "pesdemux: mpeg1 pes packet unsupported\n" msgstr "" #, c-format msgid "softhddev: LPCM %d bits per sample aren't supported\n" msgstr "" #, c-format msgid "softhddev: LPCM %d sample-rate is unsupported\n" msgstr "" #, c-format msgid "softhddev: LPCM %d channels are unsupported\n" msgstr "" msgid "tsdemux: transport stream out of sync\n" msgstr "" msgid "[softhddev] invalid PES audio packet\n" msgstr "" msgid "[softhddev] empty audio packet\n" msgstr "" #, c-format msgid "[softhddev] invalid audio packet %d bytes\n" msgstr "" msgid "[softhddev] audio buffer too small\n" msgstr "" #, c-format msgid "[softhddev] invalid LPCM audio packet %d bytes\n" msgstr "" #, c-format msgid "[softhddev] LPCM %d bits per sample aren't supported\n" msgstr "" #, c-format msgid 
"[softhddev] LPCM %d sample-rate is unsupported\n" msgstr "" #, c-format msgid "[softhddev] LPCM %d channels are unsupported\n" msgstr "" msgid "[softhddev] out of memory\n" msgstr "" #, c-format msgid "video: packet buffer too small for %d\n" msgstr "" msgid "video: no empty slot in packet ringbuffer\n" msgstr "" msgid "[softhddev] too short PES video packet\n" msgstr "" msgid "[softhddev] invalid PES video packet\n" msgstr "" #, c-format msgid "[softhddev] %d invalid PES video packet(s)\n" msgstr "" msgid "[softhddev] empty video packet\n" msgstr "" #, c-format msgid "[softhddev] invalid video packet %d/%d bytes\n" msgstr "" #, c-format msgid "[softhddev] empty video packet %d bytes\n" msgstr "" #, c-format msgid "softhddev: %s called without hw decoder\n" msgstr "" msgid "[softhddev] invalid still video packet\n" msgstr "" msgid "[softhddev] no codec known for still picture\n" msgstr "" #, c-format msgid "Bad formated geometry please use: [=][{xX}][{+-}{+-}]\n" msgstr "" #, c-format msgid "Workaround '%s' unsupported\n" msgstr "" #, c-format msgid "We need no long options\n" msgstr "" #, c-format msgid "Missing argument for option '%c'\n" msgstr "" #, c-format msgid "Unknown option '%c'\n" msgstr "" #, c-format msgid "Unhandled argument '%s'\n" msgstr "" msgid "x-setup: No X server configured!\n" msgstr "" msgid "x-setup: too many arguments for xserver\n" msgstr "" #, c-format msgid "x-setup: Failed to start X server '%s'\n" msgstr "" #, c-format msgid "[softhddev] ready%s\n" msgstr "" msgid "A software and GPU emulated HD device" msgstr "Ein Software und GPU emulieres HD-Gerät" msgid "SoftHdDevice" msgstr "SoftHdDevice" msgid "[softhddev]: dirty area too big\n" msgstr "" msgid "show" msgstr "anzeigen" msgid "hide" msgstr "verstecken" msgid "General" msgstr "Allgemeines" msgid "Make primary device" msgstr "Erzeuge primäres Gerät" msgid "Hide main menu entry" msgstr "Verstecke Hauptmenüeintrag" msgid "Osd size" msgstr "OSD Größe" msgid "Osd width" msgstr "OSD 
Breite" msgid "Osd height" msgstr "OSD Höhe" msgid "Suspend" msgstr "Unterbrechen" msgid "Detach from main menu entry" msgstr "Detach durch Hauptmenüeintrag" msgid "Suspend closes video+audio" msgstr "Unterbrechen schließt Video+Audio" msgid "Suspend stops x11" msgstr "Unterbrechen stoppt X11" msgid "Video" msgstr "Video" msgid "Enable Screensaver(DPMS) at black screen" msgstr "Bildschirmschoner(DPMS) bei schwarzen Bild aktivieren" msgid "Video background color (RGB)" msgstr "Video Hintergrundfrabe (RGB)" msgid "Video background color (Alpha)" msgstr "Video Hintergrundfarbe (Alpha)" msgid "Use studio levels (vdpau only)" msgstr "Benutze Studio Levels (nur vdpau)" msgid "60hz display mode" msgstr "60Hz Anzeigemodus" msgid "Soft start a/v sync" msgstr "Sanftanlauf A/V Sync" msgid "Black during channel switch" msgstr "Schwarz während Kanalwechsel" msgid "Clear decoder on channel switch" msgstr "Decoder bei Kanalwechsel leeren" msgid "Brightness (-1000..1000) (vdpau)" msgstr "Helligkeit (-1000..1000) (vdpau)" msgid "min" msgstr "min" msgid "max" msgstr "max" msgid "Contrast (0..10000) (vdpau)" msgstr "Kontrast (0..10000) (vdpau)" msgid "Saturation (0..10000) (vdpau)" msgstr "Sättigung (0..10000) (vdpau)" msgid "Hue (-3141..3141) (vdpau)" msgstr "Farbton (-3141..3141) (vdpau)" msgid "Scaling" msgstr "Skalierung" msgid "Deinterlace" msgstr "Deinterlace" msgid "SkipChromaDeinterlace (vdpau)" msgstr "" msgid "Inverse Telecine (vdpau)" msgstr "Inverse Filmübertragung (vdpau)" msgid "Denoise (0..1000) (vdpau)" msgstr "" msgid "off" msgstr "aus" msgid "Sharpen (-1000..1000) (vdpau)" msgstr "Schärfen (-1000..1000) (vdpau)" msgid "blur max" msgstr "Weichzeichnen max." msgid "sharpen max" msgstr "Schärfen max." 
msgid "Cut top and bottom (pixel)" msgstr "Schneide oben und unten ab (Pixel)" msgid "Cut left and right (pixel)" msgstr "Schneide links und rechts ab (Pixel)" msgid "Auto-crop" msgstr "" msgid "Autocrop interval (frames)" msgstr "" msgid "Autocrop delay (n * interval)" msgstr "" msgid "Autocrop tolerance (pixel)" msgstr "" msgid "Audio" msgstr "Audio" msgid "Audio/Video delay (ms)" msgstr "Audio/Video Verzögerung (ms)" msgid "Audio drift correction" msgstr "Audio Driftkorrektur" msgid "Pass-through default" msgstr "Pass-Through-Standard" msgid " PCM pass-through" msgstr "" msgid " AC-3 pass-through" msgstr "" msgid " E-AC-3 pass-through" msgstr "" msgid "Enable (E-)AC-3 (decoder) downmix" msgstr "Aktiviere (E-)AC-3 (decoder) downmix" msgid "Volume control" msgstr "Lautstärkesteuerung" msgid "Hardware" msgstr "Hardware" msgid "Software" msgstr "Software" msgid "Enable normalize volume" msgstr "Aktiviere Lautstärkenormalisierung" msgid " Max normalize factor (/1000)" msgstr " Max. Normalisierungsfaktor (/1000)" msgid "Enable volume compression" msgstr "Aktivieren Lautstärkekompression" msgid " Max compression factor (/1000)" msgstr " Max. 
Kompressionsfaktor (/1000)" msgid "Reduce stereo volume (/1000)" msgstr "Reduziere Steropegel (/1000)" msgid "Audio buffer size (ms)" msgstr "Audio Puffergröße (ms)" msgid "Enable automatic AES" msgstr "Aktiviere automatiche AES" msgid "Picture-In-Picture" msgstr "Bild in Bild (PIP)" msgid "Pip X (%)" msgstr "PIP X (%)" msgid "Pip Y (%)" msgstr "PIP Y (%)" msgid "Pip Width (%)" msgstr "PIP Breite (%)" msgid "Pip Height (%)" msgstr "PIP Höhe (%)" msgid "Video X (%)" msgstr "Video X (%)" msgid "Video Y (%)" msgstr "Video Y (%)" msgid "Video Width (%)" msgstr "Videobreite (%)" msgid "Video Height (%)" msgstr "Videohöhe (%)" msgid "Alternative Pip X (%)" msgstr "Alternative PIP X (%)" msgid "Alternative Pip Y (%)" msgstr "Alternative Pip Y (%)" msgid "Alternative Pip Width (%)" msgstr "Alternative PIP Breite (%)" msgid "Alternative Pip Height (%)" msgstr "Alternative PIP Höhe (%)" msgid "Alternative Video X (%)" msgstr "Alternative Video X (%)" msgid "Alternative Video Y (%)" msgstr "Alternative Video Y (%)" msgid "Alternative Video Width (%)" msgstr "Alternative Videobreite (%)" msgid "Alternative Video Height (%)" msgstr "Alternative Videohöhe (%)" #, c-format msgid "[softhddev]pip: invalid PES packet %d\n" msgstr "" msgid "[softhddev]pip: pes buffer too small\n" msgstr "" msgid "[softhddev]tsdemux: transport stream out of sync\n" msgstr "" msgid "Channel not available!" msgstr "Kanal nicht verfügbar!" 
msgid "Detach SoftHdDevice" msgstr "" msgid "Suspend SoftHdDevice" msgstr "Unterbreche SoftHdDevice" msgid "PIP toggle on/off: off" msgstr "PIP deaktivieren" msgid "PIP toggle on/off: on" msgstr "PIP aktivieren" msgid "PIP zapmode (not working)" msgstr "" msgid "PIP channel +" msgstr "PIP Kanal +" msgid "PIP channel -" msgstr "PIP Kanal -" msgid "PIP on/swap channels: swap" msgstr "" msgid "PIP on/swap channels: on" msgstr "" msgid "PIP swap position: normal" msgstr "" msgid "PIP swap position: alternative" msgstr "" msgid "PIP close" msgstr "PIP schließen" #, c-format msgid " Frames missed(%d) duped(%d) dropped(%d) total(%d)" msgstr " Frames verloren(%d) verdoppelt(%d) übersprungen(%d) Gesamt(%d)" msgid "pass-through disabled" msgstr "" msgid "pass-through enabled" msgstr "" #, c-format msgid "audio delay changed to %d" msgstr "" msgid "surround downmix enabled" msgstr "" msgid "surround downmix disabled" msgstr "" msgid "auto-crop disabled and freezed" msgstr "" msgid "auto-crop enabled" msgstr "" #, c-format msgid "[softhddev]: hot key %d is not supported\n" msgstr "" msgid "video/glx: can't make glx context current\n" msgstr "" msgid "video/glx: no v-sync\n" msgstr "" msgid "video/glx: no GLX support\n" msgstr "" #, c-format msgid "video/glx: glx version %d.%d\n" msgstr "" msgid "video/glx: can't get a RGB visual\n" msgstr "" msgid "video/glx: no valid visual found\n" msgstr "" msgid "video/glx: need atleast 8-bits per RGB\n" msgstr "" msgid "video/glx: can't create glx context\n" msgstr "" msgid "video/glx: can't disable v-sync\n" msgstr "" msgid "video/glx: v-sync disabled\n" msgstr "" msgid "video/glx: can't enable v-sync\n" msgstr "" msgid "video/glx: v-sync enabled\n" msgstr "" msgid "video/vaapi: no osd subpicture yet\n" msgstr "" msgid "video/vaapi: can't associate subpicture\n" msgstr "" #, c-format msgid "video/vaapi: can't deassociate %d surfaces\n" msgstr "" msgid "video/vaapi: surface needed not set\n" msgstr "" #, c-format msgid "video/vaapi: can't 
create %d surfaces\n" msgstr "" #, c-format msgid "video/vaapi: can't destroy %d surfaces\n" msgstr "" msgid "video/vaapi: vaQuerySurface failed\n" msgstr "" msgid "video/vaapi: out of surfaces\n" msgstr "" #, c-format msgid "video/vaapi: release surface %#010x, which is not in use\n" msgstr "" msgid "video/vaapi: out of decoders\n" msgstr "" msgid "video/vaapi: out of memory\n" msgstr "" #, c-format msgid "video/vaapi: invalid surface in ringbuffer\n" msgstr "" msgid "video/vaapi: vaSyncSurface failed\n" msgstr "" msgid "video/vaapi: can't destroy image!\n" msgstr "" msgid "video/vaapi: can't destroy context!\n" msgstr "" msgid "video/vaapi: can't destroy config!\n" msgstr "" msgid "video/vaapi: can't deassociate black surfaces\n" msgstr "" msgid "video/vaapi: can't destroy a surface\n" msgstr "" msgid "video/vaapi: can't destroy glx surface!\n" msgstr "" msgid "codec: vaQueryConfigProfiles failed" msgstr "" msgid "codec: vaQueryConfigEntrypoints failed" msgstr "" msgid "codec: unsupported: slow path\n" msgstr "" msgid "codec: can't create config" msgstr "" msgid "video/vaapi: can't create surfaces\n" msgstr "" msgid "codec: can't create context" msgstr "" msgid "video/vaapi: vaDeriveImage failed\n" msgstr "" msgid "video/vaapi: vaPutSurface failed\n" msgstr "" msgid "video/vaapi: can't destroy surfaces\n" msgstr "" #, c-format msgid "video/vaapi: Can't connect VA-API to X11 server on '%s'\n" msgstr "" #, c-format msgid "video/vaapi: Can't inititialize VA-API on '%s'\n" msgstr "" #, c-format msgid "video/vaapi: libva %d.%d (%s) initialized\n" msgstr "" msgid "video/vaapi: use vdpau bug workaround\n" msgstr "" msgid "video/vaapi: Can't get background-color attribute\n" msgstr "" #, c-format msgid "video/vaapi: background-color is %s\n" msgstr "" msgid "supported" msgstr "" msgid "unsupported" msgstr "" msgid "video/glx: glx error\n" msgstr "" #, c-format msgid "video/vaapi: unsupported pixel format %d\n" msgstr "" msgid "video/vaapi: vaQueryImageFormats failed\n" 
msgstr "" msgid "video/vaapi: can't create image!\n" msgstr "" msgid "video/glx: can't create glx surfaces\n" msgstr "" #, c-format msgid "video/vaapi: can't create config '%s'\n" msgstr "" #, c-format msgid "video/vaapi: can't create context '%s'\n" msgstr "" msgid "codec: can't get attributes" msgstr "" msgid "codec: YUV 420 supported\n" msgstr "" msgid "codec: YUV 422 supported\n" msgstr "" msgid "codec: YUV 444 supported\n" msgstr "" msgid "codec: YUV 420 not supported\n" msgstr "" #, c-format msgid "codec: can't create config '%s'\n" msgstr "" #, c-format msgid "codec: can't create context '%s'\n" msgstr "" #, c-format msgid "video/vaapi: vaPutSurface failed %d\n" msgstr "" #, c-format msgid "video/vaapi: gpu hung %dms %d\n" msgstr "" #, c-format msgid "video/vaapi: surface %#010x not ready: still displayed %d\n" msgstr "" msgid "video/glx: vaCopySurfaceGLX failed\n" msgstr "" #, c-format msgid "video/vaapi: can't get auto-crop image %d\n" msgstr "" msgid "video/vaapi: can't map auto-crop image!\n" msgstr "" msgid "video/vaapi: can't unmap auto-crop image!\n" msgstr "" #, c-format msgid "video: output buffer full, dropping frame (%d/%d)\n" msgstr "" #, c-format msgid "video/vaapi: can't create a surface: %s\n" msgstr "" #, c-format msgid "video/vaapi: can't associate subpicture: %s\n" msgstr "" #, c-format msgid "video/vaapi: can't create image: %s\n" msgstr "" #, c-format msgid "video/vaapi: can't map the image: %s\n" msgstr "" msgid "video/vaapi: can't unmap the image!\n" msgstr "" msgid "video/vaapi: can't put image!\n" msgstr "" msgid "video/vaapi: can't unmap image buffer\n" msgstr "" msgid "video/vaapi: can't get source image\n" msgstr "" #, c-format msgid "video/vaapi: vaDeriveImage failed %d\n" msgstr "" #, c-format msgid "video/vaapi: can't put image: %d!\n" msgstr "" msgid "video/vaapi: stream <-> surface size mismatch\n" msgstr "" msgid "video/vaapi: can't map the image!\n" msgstr "" #, c-format msgid "video/vaapi: can't put image err:%d!\n" msgstr 
"" #, c-format msgid "video: display buffer empty, duping frame (%d/%d) %d\n" msgstr "" #, c-format msgid "video: decoder buffer empty, duping frame (%d/%d) %d v-buf\n" msgstr "" msgid "video/vaapi: FIXME: SetBackground not supported\n" msgstr "" msgid "video/vaapi: can't map osd image buffer\n" msgstr "" msgid "video/vaapi: can't unmap osd image buffer\n" msgstr "" msgid "video/vaapi: can't get subpicture formats" msgstr "" msgid "video/vaapi: can't find a supported subpicture format" msgstr "" msgid "video/vaapi: supports unscaled osd\n" msgstr "" msgid "video/vaapi: can't create osd image\n" msgstr "" msgid "video/vaapi: can't create subpicture\n" msgstr "" msgid "video/vaapi: can't destroy subpicture\n" msgstr "" msgid "video/vdpau: surface needed not set\n" msgstr "" #, c-format msgid "video/vdpau: can't create video surface: %s\n" msgstr "" #, c-format msgid "video/vdpau: can't destroy video surface: %s\n" msgstr "" msgid "video/vdpau: out of surfaces\n" msgstr "" #, c-format msgid "video/vdpau: release surface %#08x, which is not in use\n" msgstr "" #, c-format msgid "video/vdpau: can't set mixer feature enables: %s\n" msgstr "" #, c-format msgid "video/vdpau: can't generate CSC matrix: %s\n" msgstr "" #, c-format msgid "video/vdpau: can't set mixer attribute values: %s\n" msgstr "" #, c-format msgid "video/vdpau: can't create video mixer: %s\n" msgstr "" msgid "video/vdpau: out of decoders\n" msgstr "" msgid "video/vdpau: out of memory\n" msgstr "" msgid "video/vdpau: need 1 future, 1 current, 1 back and 1 work surface\n" msgstr "" #, c-format msgid "video/vdpau: can't destroy video decoder: %s\n" msgstr "" #, c-format msgid "video/vdpau: can't destroy video mixer: %s\n" msgstr "" msgid "video/vdpau: decoder not in decoder list.\n" msgstr "" #, c-format msgid "video/vdpau: Can't get function address of '%s': %s\n" msgstr "" #, c-format msgid "video/vdpau: can't create presentation queue target: %s\n" msgstr "" #, c-format msgid "video/vdpau: can't create 
presentation queue: %s\n" msgstr "" #, c-format msgid "video/vdpau: can't create output surface: %s\n" msgstr "" #, c-format msgid "video/vdpau: can't create grab render output surface: %s\n" msgstr "" #, c-format msgid "video/vdpau: can't destroy output surface: %s\n" msgstr "" #, c-format msgid "video/vdpau: can't destroy grab render output surface: %s\n" msgstr "" msgid "video/vdpau preemption device not our device\n" msgstr "" #, c-format msgid "video/vdpau: Can't create vdp device on display '%s'\n" msgstr "" msgid "video/vdpau: Can't get function address of 'GetErrorString'\n" msgstr "" #, c-format msgid "video/vdpau: VDPAU API version: %u\n" msgstr "" #, c-format msgid "video/vdpau: VDPAU information: %s\n" msgstr "" #, c-format msgid "video/vdpau: can't register preemption callback: %s\n" msgstr "" #, c-format msgid "video/vdpau: can't query feature '%s': %s\n" msgstr "" #, c-format msgid "video/vdpau: highest supported high quality scaling %d\n" msgstr "" msgid "video/vdpau: high quality scaling unsupported\n" msgstr "" #, c-format msgid "video/vdpau: feature deinterlace temporal %s\n" msgstr "" #, c-format msgid "video/vdpau: feature deinterlace temporal spatial %s\n" msgstr "" #, c-format msgid "video/vdpau: attribute skip chroma deinterlace %s\n" msgstr "" #, c-format msgid "video/vdpau: can't query video surface: %s\n" msgstr "" #, c-format msgid "video/vdpau: 4:2:0 chroma format with %dx%d supported\n" msgstr "" #, c-format msgid "video/vdpau: 4:2:2 chroma format with %dx%d supported\n" msgstr "" #, c-format msgid "video/vdpau: 4:4:4 chroma format with %dx%d supported\n" msgstr "" msgid "video/vdpau: doesn't support yuvy video surface\n" msgstr "" msgid "video/vdpau: doesn't support yv12 video surface\n" msgstr "" #, c-format msgid "video/vdpau: can't query output surface: %s\n" msgstr "" #, c-format msgid "video/vdpau: 8bit BGRA format with %dx%d supported\n" msgstr "" #, c-format msgid "video/vdpau: 8bit RGBA format with %dx%d supported\n" msgstr "" 
#, c-format msgid "video/vdpau: 10bit RGBA format with %dx%d supported\n" msgstr "" #, c-format msgid "video/vdpau: 8bit BRGA format with %dx%d supported\n" msgstr "" #, c-format msgid "video/vdpau: can't get video surface parameters: %s\n" msgstr "" msgid "video/vdpau: video surface chroma type mismatch\n" msgstr "" msgid "video/vdpau: video surface size mismatch\n" msgstr "" #, c-format msgid "video/vdpau: can't create decoder: %s\n" msgstr "" #, c-format msgid "video/vdpau: can't query decoder capabilities: %s\n" msgstr "" msgid "video/vdpau: no valid vdpau pixfmt found\n" msgstr "" msgid "video/vdpau: no valid profile found\n" msgstr "" #, c-format msgid "video/vdpau: unsupported chroma type %d\n" msgstr "" #, c-format msgid "video/vdpau: can't get video surface bits: %s\n" msgstr "" #, c-format msgid "video/vdpau: can't get output surface parameters: %s\n" msgstr "" #, c-format msgid "video/vdpau: can't render output surface: %s\n" msgstr "" #, c-format msgid "video/vdpau: unsupported rgba format %d\n" msgstr "" #, c-format msgid "video/vdpau: can't get video surface bits native: %s\n" msgstr "" #, c-format msgid "video/vdpau: output buffer full, dropping frame (%d/%d)\n" msgstr "" #, c-format msgid "video/vdpau: pixel format %d not supported\n" msgstr "" #, c-format msgid "video/vdpau: can't put video surface bits: %s\n" msgstr "" msgid "video: get hwaccel context, not supported\n" msgstr "" #, c-format msgid "video/vdpau: can't render bitmap surface: %s\n" msgstr "" #, c-format msgid "video/vdpau: %d surface deinterlace unsupported\n" msgstr "" #, c-format msgid "video/vdpau: can't render mixer: %s\n" msgstr "" #, c-format msgid "video/vdpau: can't query status: %s\n" msgstr "" #, c-format msgid "video/vdpau: can't block queue: %s\n" msgstr "" #, c-format msgid "video/vdpau: missed frame (%d/%d)\n" msgstr "" #, c-format msgid "video/vdpau: can't queue display: %s\n" msgstr "" msgid "video/vdpau: osd too big: unsupported\n" msgstr "" #, c-format msgid 
"video/vdpau: bitmap surface put bits failed: %s\n" msgstr "" #, c-format msgid "video/vdpau: output surface put bits failed: %s\n" msgstr "" #, c-format msgid "video/vdpau: can't create bitmap surface: %s\n" msgstr "" #, c-format msgid "video/vdpau: can't destroy bitmap surface: %s\n" msgstr "" msgid "video: fatal i/o error\n" msgstr "" #, c-format msgid "video/event: No symbol for %d\n" msgstr "" msgid "video: can't lock thread\n" msgstr "" msgid "video: can't unlock thread\n" msgstr "" msgid "video: can't queue cancel video display thread\n" msgstr "" msgid "video: can't cancel video display thread\n" msgstr "" #, c-format msgid "video: repeated pict %d found, but not handled\n" msgstr "" #, c-format msgid "video/vdpau: decoder rendering failed: %s\n" msgstr "" #, c-format msgid "video/vdpau: %s: decoder render too slow %ums\n" msgstr "" msgid "video/vdpau: draw render state, without vdpau enabled\n" msgstr "" msgid "video: out of memory\n" msgstr "" msgid "softhddev: grab unsupported\n" msgstr "" #, c-format msgid "video: Can't connect to X11 server on '%s'\n" msgstr "" #, c-format msgid "video: Can't initialize X11 thread support on '%s'\n" msgstr "" msgid "video: Can't convert XLIB display to XCB connection\n" msgstr "" #, c-format msgid "video: '%s' output module isn't supported\n" msgstr "" msgid "video: error closing display\n" msgstr "" vdr-plugin-softhddevice/audio.c0000644000175000017500000023466412644034136016425 0ustar tobiastobias/// /// @file audio.c @brief Audio module /// /// Copyright (c) 2009 - 2014 by Johns. All Rights Reserved. /// /// Contributor(s): /// /// License: AGPLv3 /// /// This program is free software: you can redistribute it and/or modify /// it under the terms of the GNU Affero General Public License as /// published by the Free Software Foundation, either version 3 of the /// License. 
/// /// This program is distributed in the hope that it will be useful, /// but WITHOUT ANY WARRANTY; without even the implied warranty of /// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the /// GNU Affero General Public License for more details. /// /// $Id: 77fa65030b179e78c13d0bf69a7cc417dae89e1a $ ////////////////////////////////////////////////////////////////////////////// /// /// @defgroup Audio The audio module. /// /// This module contains all audio output functions. /// /// ALSA PCM/Mixer api is supported. /// @see http://www.alsa-project.org/alsa-doc/alsa-lib /// /// @note alsa async playback is broken, don't use it! /// /// OSS PCM/Mixer api is supported. /// @see http://manuals.opensound.com/developer/ /// /// /// @todo FIXME: there can be problems with little/big endian. /// //#define USE_ALSA ///< enable alsa support //#define USE_OSS ///< enable OSS support #define USE_AUDIO_THREAD ///< use thread for audio playback #define USE_AUDIO_MIXER ///< use audio module mixer #include #include #include #include #include #include #include #define _(str) gettext(str) ///< gettext shortcut #define _N(str) str ///< gettext_noop shortcut #ifdef USE_ALSA #include #endif #ifdef USE_OSS #include #include #include #include // SNDCTL_DSP_HALT_OUTPUT compatibility #ifndef SNDCTL_DSP_HALT_OUTPUT # if defined(SNDCTL_DSP_RESET_OUTPUT) # define SNDCTL_DSP_HALT_OUTPUT SNDCTL_DSP_RESET_OUTPUT # elif defined(SNDCTL_DSP_RESET) # define SNDCTL_DSP_HALT_OUTPUT SNDCTL_DSP_RESET # else # error "No valid SNDCTL_DSP_HALT_OUTPUT found." 
# endif #endif #include #include #include #include #endif #ifdef USE_AUDIO_THREAD #ifndef __USE_GNU #define __USE_GNU #endif #include #ifndef HAVE_PTHREAD_NAME /// only available with newer glibc #define pthread_setname_np(thread, name) #endif #endif #include "iatomic.h" // portable atomic_t #include "ringbuffer.h" #include "misc.h" #include "audio.h" //---------------------------------------------------------------------------- // Declarations //---------------------------------------------------------------------------- /** ** Audio output module structure and typedef. */ typedef struct _audio_module_ { const char *Name; ///< audio output module name int (*const Thread) (void); ///< module thread handler void (*const FlushBuffers) (void); ///< flush sample buffers int64_t(*const GetDelay) (void); ///< get current audio delay void (*const SetVolume) (int); ///< set output volume int (*const Setup) (int *, int *, int); ///< setup channels, samplerate void (*const Play) (void); ///< play audio void (*const Pause) (void); ///< pause audio void (*const Init) (void); ///< initialize audio output module void (*const Exit) (void); ///< cleanup audio output module } AudioModule; static const AudioModule NoopModule; ///< forward definition of noop module //---------------------------------------------------------------------------- // Variables //---------------------------------------------------------------------------- char AudioAlsaDriverBroken; ///< disable broken driver message char AudioAlsaNoCloseOpen; ///< disable alsa close/open fix char AudioAlsaCloseOpenDelay; ///< enable alsa close/open delay fix static const char *AudioModuleName; ///< which audio module to use /// Selected audio module. 
static const AudioModule *AudioUsedModule = &NoopModule;
static const char *AudioPCMDevice;	///< PCM device name
static const char *AudioPassthroughDevice;	///< Passthrough device name
static char AudioAppendAES;		///< flag automatic append AES
static const char *AudioMixerDevice;	///< mixer device name
static const char *AudioMixerChannel;	///< mixer channel name
static char AudioDoingInit;		///< flag in init, reduce error
static volatile char AudioRunning;	///< thread running / stopped
static volatile char AudioPaused;	///< audio paused
static volatile char AudioVideoIsReady;	///< video ready start early
static int AudioSkip;			///< skip audio to sync to video

static const int AudioBytesProSample = 2;	///< number of bytes per sample

static int AudioBufferTime = 336;	///< audio buffer time in ms

#ifdef USE_AUDIO_THREAD
static pthread_t AudioThread;		///< audio play thread
static pthread_mutex_t AudioMutex;	///< audio condition mutex
static pthread_cond_t AudioStartCond;	///< condition variable
static char AudioThreadStop;		///< stop audio thread
#else
static const int AudioThread;		///< dummy audio thread
#endif

// software volume/normalize/compress state (all factors are * 1000)
static char AudioSoftVolume;		///< flag use soft volume
static char AudioNormalize;		///< flag use volume normalize
static char AudioCompression;		///< flag use compress volume
static char AudioMute;			///< flag muted
static int AudioAmplifier;		///< software volume factor
static int AudioNormalizeFactor;	///< current normalize factor
static const int AudioMinNormalize = 100;	///< min. normalize factor
static int AudioMaxNormalize;		///< max. normalize factor
static int AudioCompressionFactor;	///< current compression factor
static int AudioMaxCompression;		///< max. compression factor
static int AudioStereoDescent;		///< volume descent for stereo
static int AudioVolume;			///< current volume (0 .. 1000)

extern int VideoAudioDelay;		///< import audio/video delay

/// default ring buffer size ~2s 8ch 16bit (3 * 5 * 7 * 8)
static const unsigned AudioRingBufferSize = 3 * 5 * 7 * 8 * 2 * 1000;

static int AudioChannelsInHw[9];	///< table which channels are supported

enum _audio_rates
{					///< sample rates enumeration
    // HW: 32000 44100 48000 88200 96000 176400 192000
    //Audio32000,			///< 32.0Khz
    Audio44100,				///< 44.1Khz
    Audio48000,				///< 48.0Khz
    //Audio88200,			///< 88.2Khz
    //Audio96000,			///< 96.0Khz
    //Audio176400,			///< 176.4Khz
    Audio192000,			///< 192.0Khz
    AudioRatesMax			///< max index
};

/// table which rates are supported
static int AudioRatesInHw[AudioRatesMax];

/// input to hardware channel matrix
static int AudioChannelMatrix[AudioRatesMax][9];

/// rates tables (must be sorted by frequency)
static const unsigned AudioRatesTable[AudioRatesMax] = {
    44100, 48000, 192000
};

//----------------------------------------------------------------------------
//	filter
//----------------------------------------------------------------------------

static const int AudioNormSamples = 4096;	///< number of samples

#define AudioNormMaxIndex 128		///< number of average values

/// average of n last sample blocks
static uint32_t AudioNormAverage[AudioNormMaxIndex];
static int AudioNormIndex;		///< index into average table
static int AudioNormReady;		///< index counter
static int AudioNormCounter;		///< sample counter

/*
**	Audio normalizer.  (function follows below)
*/
** ** @param samples sample buffer ** @param count number of bytes in sample buffer */ static void AudioNormalizer(int16_t * samples, int count) { int i; int l; int n; uint32_t avg; int factor; int16_t *data; // average samples l = count / AudioBytesProSample; data = samples; do { n = l; if (AudioNormCounter + n > AudioNormSamples) { n = AudioNormSamples - AudioNormCounter; } avg = AudioNormAverage[AudioNormIndex]; for (i = 0; i < n; ++i) { int t; t = data[i]; avg += (t * t) / AudioNormSamples; } AudioNormAverage[AudioNormIndex] = avg; AudioNormCounter += n; if (AudioNormCounter >= AudioNormSamples) { if (AudioNormReady < AudioNormMaxIndex) { AudioNormReady++; } else { avg = 0; for (i = 0; i < AudioNormMaxIndex; ++i) { avg += AudioNormAverage[i] / AudioNormMaxIndex; } // calculate normalize factor if (avg > 0) { factor = ((INT16_MAX / 8) * 1000U) / (uint32_t) sqrt(avg); // smooth normalize AudioNormalizeFactor = (AudioNormalizeFactor * 500 + factor * 500) / 1000; if (AudioNormalizeFactor < AudioMinNormalize) { AudioNormalizeFactor = AudioMinNormalize; } if (AudioNormalizeFactor > AudioMaxNormalize) { AudioNormalizeFactor = AudioMaxNormalize; } } else { factor = 1000; } Debug(4, "audio/noramlize: avg %8d, fac=%6.3f, norm=%6.3f\n", avg, factor / 1000.0, AudioNormalizeFactor / 1000.0); } AudioNormIndex = (AudioNormIndex + 1) % AudioNormMaxIndex; AudioNormCounter = 0; AudioNormAverage[AudioNormIndex] = 0U; } data += n; l -= n; } while (l > 0); // apply normalize factor for (i = 0; i < count / AudioBytesProSample; ++i) { int t; t = (samples[i] * AudioNormalizeFactor) / 1000; if (t < INT16_MIN) { t = INT16_MIN; } else if (t > INT16_MAX) { t = INT16_MAX; } samples[i] = t; } } /** ** Reset normalizer. */ static void AudioResetNormalizer(void) { int i; AudioNormCounter = 0; AudioNormReady = 0; for (i = 0; i < AudioNormMaxIndex; ++i) { AudioNormAverage[i] = 0U; } AudioNormalizeFactor = 1000; } /** ** Audio compression. 
** ** @param samples sample buffer ** @param count number of bytes in sample buffer */ static void AudioCompressor(int16_t * samples, int count) { int max_sample; int i; int factor; // find loudest sample max_sample = 0; for (i = 0; i < count / AudioBytesProSample; ++i) { int t; t = abs(samples[i]); if (t > max_sample) { max_sample = t; } } // calculate compression factor if (max_sample > 0) { factor = (INT16_MAX * 1000) / max_sample; // smooth compression (FIXME: make configurable?) AudioCompressionFactor = (AudioCompressionFactor * 950 + factor * 50) / 1000; if (AudioCompressionFactor > factor) { AudioCompressionFactor = factor; // no clipping } if (AudioCompressionFactor > AudioMaxCompression) { AudioCompressionFactor = AudioMaxCompression; } } else { return; // silent nothing todo } Debug(4, "audio/compress: max %5d, fac=%6.3f, com=%6.3f\n", max_sample, factor / 1000.0, AudioCompressionFactor / 1000.0); // apply compression factor for (i = 0; i < count / AudioBytesProSample; ++i) { int t; t = (samples[i] * AudioCompressionFactor) / 1000; if (t < INT16_MIN) { t = INT16_MIN; } else if (t > INT16_MAX) { t = INT16_MAX; } samples[i] = t; } } /** ** Reset compressor. */ static void AudioResetCompressor(void) { AudioCompressionFactor = 2000; if (AudioCompressionFactor > AudioMaxCompression) { AudioCompressionFactor = AudioMaxCompression; } } /** ** Audio software amplifier. ** ** @param samples sample buffer ** @param count number of bytes in sample buffer ** ** @todo FIXME: this does hard clipping */ static void AudioSoftAmplifier(int16_t * samples, int count) { int i; // silence if (AudioMute || !AudioAmplifier) { memset(samples, 0, count); return; } for (i = 0; i < count / AudioBytesProSample; ++i) { int t; t = (samples[i] * AudioAmplifier) / 1000; if (t < INT16_MIN) { t = INT16_MIN; } else if (t > INT16_MAX) { t = INT16_MAX; } samples[i] = t; } } #ifdef USE_AUDIO_MIXER /** ** Upmix mono to stereo. 
/*
**	Upmix mono to stereo (documentation continued from above).
**
**	@param in	input sample buffer (frames mono samples)
**	@param frames	number of frames in sample buffer
**	@param out	output sample buffer (2 * frames samples)
*/
static void AudioMono2Stereo(const int16_t * in, int frames, int16_t * out)
{
    int i;

    // duplicate the mono sample into left and right channel
    for (i = 0; i < frames; ++i) {
	int t;

	t = in[i];
	out[i * 2 + 0] = t;
	out[i * 2 + 1] = t;
    }
}

/**
**	Downmix stereo to mono.
**
**	@param in	input sample buffer (2 * frames samples)
**	@param frames	number of frames in sample buffer
**	@param out	output sample buffer (frames samples)
*/
static void AudioStereo2Mono(const int16_t * in, int frames, int16_t * out)
{
    int i;

    // FIX: the loop previously ran to 'frames' although it consumes two
    // input samples per frame, so only the first half of the buffer was
    // downmixed and the second half of 'out' was never written.
    for (i = 0; i < frames * 2; i += 2) {
	out[i / 2] = (in[i + 0] + in[i + 1]) / 2;	// average L + R
    }
}

/**
**	Downmix surround to stereo.
**
**	ffmpeg L  R  C  Ls Rs		-> alsa L  R  Ls Rs C
**	ffmpeg L  R  C  LFE Ls Rs	-> alsa L  R  Ls Rs C  LFE
**	ffmpeg L  R  C  LFE Ls Rs Rl Rr	-> alsa L  R  Ls Rs C  LFE Rl Rr
**
**	@param in	input sample buffer
**	@param in_chan	nr. of input channels
**	@param frames	number of frames in sample buffer
**	@param out	output sample buffer (2 * frames samples)
*/
static void AudioSurround2Stereo(const int16_t * in, int in_chan, int frames,
    int16_t * out)
{
    while (frames--) {
	int l;
	int r;

	// weights are fixed-point * 1000; each case sums to <= 1000 per side
	switch (in_chan) {
	    case 3:			// stereo or surround? =>stereo
		l = in[0] * 600;	// L
		r = in[1] * 600;	// R
		l += in[2] * 400;	// C
		r += in[2] * 400;
		break;
	    case 4:			// quad or surround? =>quad
		l = in[0] * 600;	// L
		r = in[1] * 600;	// R
		l += in[2] * 400;	// Ls
		r += in[3] * 400;	// Rs
		break;
	    case 5:			// 5.0
		l = in[0] * 500;	// L
		r = in[1] * 500;	// R
		l += in[2] * 200;	// Ls
		r += in[3] * 200;	// Rs
		l += in[4] * 300;	// C
		r += in[4] * 300;
		break;
	    case 6:			// 5.1
		l = in[0] * 400;	// L
		r = in[1] * 400;	// R
		l += in[2] * 200;	// Ls
		r += in[3] * 200;	// Rs
		l += in[4] * 300;	// C
		r += in[4] * 300;
		l += in[5] * 100;	// LFE
		r += in[5] * 100;
		break;
	    case 7:			// 7.0
		l = in[0] * 400;	// L
		r = in[1] * 400;	// R
		l += in[2] * 200;	// Ls
		r += in[3] * 200;	// Rs
		l += in[4] * 300;	// C
		r += in[4] * 300;
		l += in[5] * 100;	// RL
		r += in[6] * 100;	// RR
		break;
	    case 8:			// 7.1
		l = in[0] * 400;	// L
		r = in[1] * 400;	// R
		l += in[2] * 150;	// Ls
		r += in[3] * 150;	// Rs
		l += in[4] * 250;	// C
		r += in[4] * 250;
		l += in[5] * 100;	// LFE
		r += in[5] * 100;
		l += in[6] * 100;	// RL
		r += in[7] * 100;	// RR
		break;
	    default:
		abort();		// caller guarantees 3..8 channels
	}
	in += in_chan;
	out[0] = l / 1000;
	out[1] = r / 1000;
	out += 2;
    }
}

/**
**	Upmix @a in_chan channels to @a out_chan.
**
**	@param in	input sample buffer
**	@param in_chan	nr. of input channels
**	@param frames	number of frames in sample buffer
**	@param out	output sample buffer
**	@param out_chan	nr. of output channels (>= in_chan)
*/
static void AudioUpmix(const int16_t * in, int in_chan, int frames,
    int16_t * out, int out_chan)
{
    while (frames--) {
	int i;

	for (i = 0; i < in_chan; ++i) {	// copy existing channels
	    *out++ = *in++;
	}
	for (; i < out_chan; ++i) {	// silents missing channels
	    *out++ = 0;
	}
    }
}

/*
**	Resample ffmpeg sample format to hardware format.
**
**	FIXME: use libswresample for this and move it to codec.
**	FIXME: ffmpeg to alsa conversion is already done in codec.c.
**
**	ffmpeg L  R  C  Ls Rs		-> alsa L  R  Ls Rs C
**	ffmpeg L  R  C  LFE Ls Rs	-> alsa L  R  Ls Rs C  LFE
**	ffmpeg L  R  C  LFE Ls Rs Rl Rr	-> alsa L  R  Ls Rs C  LFE Rl Rr
**
**	@param in	input sample buffer
**	@param in_chan	nr. of input channels  (function follows below)
*/
/*
**	Resample documentation, continued from above:
**	@param frames	number of frames in sample buffer
**	@param out	output sample buffer
**	@param out_chan	nr. of output channels
*/
static void AudioResample(const int16_t * in, int in_chan, int frames,
    int16_t * out, int out_chan)
{
    // dispatch on the (input, output) channel pair
    switch (in_chan * 8 + out_chan) {
	case 1 * 8 + 1:
	case 2 * 8 + 2:
	case 3 * 8 + 3:
	case 4 * 8 + 4:
	case 5 * 8 + 5:
	case 6 * 8 + 6:
	case 7 * 8 + 7:
	case 8 * 8 + 8:		// input = output channels
	    memcpy(out, in, frames * in_chan * AudioBytesProSample);
	    break;
	case 2 * 8 + 1:
	    AudioStereo2Mono(in, frames, out);
	    break;
	case 1 * 8 + 2:
	    AudioMono2Stereo(in, frames, out);
	    break;
	case 3 * 8 + 2:
	case 4 * 8 + 2:
	case 5 * 8 + 2:
	case 6 * 8 + 2:
	case 7 * 8 + 2:
	case 8 * 8 + 2:
	    AudioSurround2Stereo(in, in_chan, frames, out);
	    break;
	case 5 * 8 + 6:
	case 3 * 8 + 8:
	case 5 * 8 + 8:
	case 6 * 8 + 8:
	    AudioUpmix(in, in_chan, frames, out, out_chan);
	    break;

	default:
	    Error("audio: unsupported %d -> %d channels resample\n", in_chan,
		out_chan);
	    // play silence
	    memset(out, 0, frames * out_chan * AudioBytesProSample);
	    break;
    }
}

#endif

//----------------------------------------------------------------------------
//	ring buffer
//----------------------------------------------------------------------------

#define AUDIO_RING_MAX 8		///< number of audio ring buffers

/**
**	Audio ring buffer.
*/
typedef struct _audio_ring_ring_
{
    char FlushBuffers;			///< flag: flush buffers
    char Passthrough;			///< flag: use pass-through (AC-3, ...)
    int16_t PacketSize;			///< packet size
    unsigned HwSampleRate;		///< hardware sample rate in Hz
    unsigned HwChannels;		///< hardware number of channels
    unsigned InSampleRate;		///< input sample rate in Hz
    unsigned InChannels;		///< input number of channels
    int64_t PTS;			///< pts clock
    RingBuffer *RingBuffer;		///< sample ring buffer
} AudioRingRing;

/// ring of audio ring buffers
static AudioRingRing AudioRing[AUDIO_RING_MAX];
static int AudioRingWrite;		///< audio ring write pointer
static int AudioRingRead;		///< audio ring read pointer
static atomic_t AudioRingFilled;	///< how many of the ring is used
static unsigned AudioStartThreshold;	///< start play, if filled

/**
**	Add sample-rate, number of channels change to ring.
**
**	@param sample_rate	sample-rate frequency
**	@param channels	number of channels
**	@param passthrough	use pass-through (AC-3, ...) device
**
**	@retval -1	error
**	@retval 0	okay
**
**	@note this function shouldn't fail.  Checks are done during AudioInit.
*/
static int AudioRingAdd(unsigned sample_rate, int channels, int passthrough)
{
    unsigned u;

    // search supported sample-rates (table is sorted ascending)
    for (u = 0; u < AudioRatesMax; ++u) {
	if (AudioRatesTable[u] == sample_rate) {
	    goto found;
	}
	if (AudioRatesTable[u] > sample_rate) {
	    break;
	}
    }
    Error(_("audio: %dHz sample-rate unsupported\n"), sample_rate);
    return -1;				// unsupported sample-rate

  found:
    if (!AudioChannelMatrix[u][channels]) {
	Error(_("audio: %d channels unsupported\n"), channels);
	return -1;			// unsupported nr. of channels
    }
    if (atomic_read(&AudioRingFilled) == AUDIO_RING_MAX) {	// no free slot
	// FIXME: can wait for ring buffer empty
	Error(_("audio: out of ring buffers\n"));
	return -1;
    }
    AudioRingWrite = (AudioRingWrite + 1) % AUDIO_RING_MAX;

    AudioRing[AudioRingWrite].FlushBuffers = 0;
    AudioRing[AudioRingWrite].Passthrough = passthrough;
    AudioRing[AudioRingWrite].PacketSize = 0;
    AudioRing[AudioRingWrite].InSampleRate = sample_rate;
    AudioRing[AudioRingWrite].InChannels = channels;
    AudioRing[AudioRingWrite].HwSampleRate = sample_rate;
    AudioRing[AudioRingWrite].HwChannels = AudioChannelMatrix[u][channels];
    // sentinel: PTS not yet known
    AudioRing[AudioRingWrite].PTS = INT64_C(0x8000000000000000);
    RingBufferReset(AudioRing[AudioRingWrite].RingBuffer);

    Debug(3, "audio: %d ring buffer prepared\n",
	atomic_read(&AudioRingFilled) + 1);

    atomic_inc(&AudioRingFilled);

#ifdef USE_AUDIO_THREAD
    if (AudioThread) {
	// tell thread, that there is something todo
	AudioRunning = 1;
	pthread_cond_signal(&AudioStartCond);
    }
#endif

    return 0;
}

/**
**	Setup audio ring.
*/
static void AudioRingInit(void)
{
    int i;

    for (i = 0; i < AUDIO_RING_MAX; ++i) {
	// ~2s 8ch 16bit
	AudioRing[i].RingBuffer = RingBufferNew(AudioRingBufferSize);
    }
    atomic_set(&AudioRingFilled, 0);
}

/*
**	Cleanup audio ring.  (function follows below)
*/
/* (see "Cleanup audio ring." documentation above) */
static void AudioRingExit(void)
{
    int i;

    for (i = 0; i < AUDIO_RING_MAX; ++i) {
	if (AudioRing[i].RingBuffer) {
	    RingBufferDel(AudioRing[i].RingBuffer);
	    AudioRing[i].RingBuffer = NULL;
	}
	AudioRing[i].HwSampleRate = 0;	// checked for valid setup
	AudioRing[i].InSampleRate = 0;
    }
    AudioRingRead = 0;
    AudioRingWrite = 0;
}

#ifdef USE_ALSA

//============================================================================
//	A L S A
//============================================================================

//----------------------------------------------------------------------------
//	Alsa variables
//----------------------------------------------------------------------------

static snd_pcm_t *AlsaPCMHandle;	///< alsa pcm handle
static char AlsaCanPause;		///< hw supports pause
static int AlsaUseMmap;			///< use mmap

static snd_mixer_t *AlsaMixer;		///< alsa mixer handle
static snd_mixer_elem_t *AlsaMixerElem;	///< alsa pcm mixer element
static int AlsaRatio;			///< internal -> mixer ratio * 1000

//----------------------------------------------------------------------------
//	alsa pcm
//----------------------------------------------------------------------------

/**
**	Play samples from ringbuffer.
**
**	Fill the kernel buffer, as much as possible.
**
**	@retval 0	ok
**	@retval 1	ring buffer empty
**	@retval -1	underrun error
*/
static int AlsaPlayRingbuffer(void)
{
    int first;

    first = 1;
    for (;;) {				// loop for ring buffer wrap
	int avail;
	int n;
	int err;
	int frames;
	const void *p;

	// how many bytes can be written?
	n = snd_pcm_avail_update(AlsaPCMHandle);
	if (n < 0) {
	    if (n == -EAGAIN) {
		continue;
	    }
	    Warning(_("audio/alsa: avail underrun error? '%s'\n"),
		snd_strerror(n));
	    err = snd_pcm_recover(AlsaPCMHandle, n, 0);
	    if (err >= 0) {
		continue;
	    }
	    Error(_("audio/alsa: snd_pcm_avail_update(): %s\n"),
		snd_strerror(n));
	    return -1;
	}
	avail = snd_pcm_frames_to_bytes(AlsaPCMHandle, n);
	if (avail < 256) {		// too much overhead
	    if (first) {
		// happens with broken alsa drivers
		if (AudioThread) {
		    if (!AudioAlsaDriverBroken) {
			Error(_("audio/alsa: broken driver %d state '%s'\n"),
			    avail,
			    snd_pcm_state_name(snd_pcm_state(AlsaPCMHandle)));
		    }
		    // try to recover
		    if (snd_pcm_state(AlsaPCMHandle)
			== SND_PCM_STATE_PREPARED) {
			if ((err = snd_pcm_start(AlsaPCMHandle)) < 0) {
			    Error(_("audio/alsa: snd_pcm_start(): %s\n"),
				snd_strerror(err));
			}
		    }
		    usleep(5 * 1000);
		}
	    }
	    Debug(4, "audio/alsa: break state '%s'\n",
		snd_pcm_state_name(snd_pcm_state(AlsaPCMHandle)));
	    break;
	}

	n = RingBufferGetReadPointer(AudioRing[AudioRingRead].RingBuffer, &p);
	if (!n) {			// ring buffer empty
	    if (first) {		// only error on first loop
		Debug(4, "audio/alsa: empty buffers %d\n", avail);
		// ring buffer empty
		// AlsaLowWaterMark = 1;
		return 1;
	    }
	    return 0;
	}
	if (n < avail) {		// not enough bytes in ring buffer
	    avail = n;
	}
	if (!avail) {			// full or buffer empty
	    break;
	}
	// muting pass-through AC-3, can produce disturbance
	if (AudioMute || (AudioSoftVolume
		&& !AudioRing[AudioRingRead].Passthrough)) {
	    // FIXME: quick&dirty cast
	    AudioSoftAmplifier((int16_t *) p, avail);
	    // FIXME: if not all are written, we double amplify them
	}

	frames = snd_pcm_bytes_to_frames(AlsaPCMHandle, avail);
#ifdef DEBUG
	if (avail != snd_pcm_frames_to_bytes(AlsaPCMHandle, frames)) {
	    Error(_("audio/alsa: bytes lost -> out of sync\n"));
	}
#endif

	for (;;) {
	    if (AlsaUseMmap) {
		err = snd_pcm_mmap_writei(AlsaPCMHandle, p, frames);
	    } else {
		err = snd_pcm_writei(AlsaPCMHandle, p, frames);
	    }
	    //Debug(3, "audio/alsa: wrote %d/%d frames\n", err, frames);
	    if (err != frames) {
		if (err < 0) {
		    if (err == -EAGAIN) {
			continue;
		    }
		    /*
		       if (err == -EBADFD) {
		       goto again;
		       }
		     */
		    Warning(_("audio/alsa: writei underrun error? '%s'\n"),
			snd_strerror(err));
		    err = snd_pcm_recover(AlsaPCMHandle, err, 0);
		    if (err >= 0) {
			continue;
		    }
		    Error(_("audio/alsa: snd_pcm_writei failed: %s\n"),
			snd_strerror(err));
		    return -1;
		}
		// this could happen, if underrun happened
		Warning(_("audio/alsa: not all frames written\n"));
		avail = snd_pcm_frames_to_bytes(AlsaPCMHandle, err);
	    }
	    break;
	}

	RingBufferReadAdvance(AudioRing[AudioRingRead].RingBuffer, avail);
	first = 0;
    }

    return 0;
}

/**
**	Flush alsa buffers.
*/
static void AlsaFlushBuffers(void)
{
    if (AlsaPCMHandle) {
	int err;
	snd_pcm_state_t state;

	state = snd_pcm_state(AlsaPCMHandle);
	Debug(3, "audio/alsa: flush state %s\n", snd_pcm_state_name(state));
	if (state != SND_PCM_STATE_OPEN) {
	    if ((err = snd_pcm_drop(AlsaPCMHandle)) < 0) {
		Error(_("audio: snd_pcm_drop(): %s\n"), snd_strerror(err));
	    }
	    // ****ing alsa crash, when in open state here
	    if ((err = snd_pcm_prepare(AlsaPCMHandle)) < 0) {
		Error(_("audio: snd_pcm_prepare(): %s\n"), snd_strerror(err));
	    }
	}
    }
}

#ifdef USE_AUDIO_THREAD

//----------------------------------------------------------------------------
//	thread playback
//----------------------------------------------------------------------------

/**
**	Alsa thread
**
**	Play some samples and return.
**
**	@retval -1	error
**	@retval 0	underrun
**	@retval 1	running
*/
static int AlsaThread(void)
{
    int err;

    if (!AlsaPCMHandle) {
	usleep(24 * 1000);
	return -1;
    }
    for (;;) {
	if (AudioPaused) {
	    return 1;
	}
	// wait for space in kernel buffers
	if ((err = snd_pcm_wait(AlsaPCMHandle, 24)) < 0) {
	    Warning(_("audio/alsa: wait underrun error? '%s'\n"),
		snd_strerror(err));
	    err = snd_pcm_recover(AlsaPCMHandle, err, 0);
	    if (err >= 0) {
		continue;
	    }
	    Error(_("audio/alsa: snd_pcm_wait(): %s\n"), snd_strerror(err));
	    usleep(24 * 1000);
	    return -1;
	}
	break;
    }
    if (!err || AudioPaused) {		// timeout or some commands
	return 1;
    }
    if ((err = AlsaPlayRingbuffer())) {	// empty or error
	snd_pcm_state_t state;

	if (err < 0) {			// underrun error
	    return -1;
	}
	state = snd_pcm_state(AlsaPCMHandle);
	if (state != SND_PCM_STATE_RUNNING) {
	    Debug(3, "audio/alsa: stopping play '%s'\n",
		snd_pcm_state_name(state));
	    return 0;
	}
	usleep(24 * 1000);		// let fill/empty the buffers
    }
    return 1;
}

#endif

//----------------------------------------------------------------------------

/**
**	Open alsa pcm device.
**
**	@param passthrough	use pass-through (AC-3, ...) device
*/
static snd_pcm_t *AlsaOpenPCM(int passthrough)
{
    const char *device;
    snd_pcm_t *handle;
    int err;

    // &&|| hell: config value wins, then environment, then "default"
    if (!(passthrough && ((device = AudioPassthroughDevice)
		|| (device = getenv("ALSA_PASSTHROUGH_DEVICE"))))
	&& !(device = AudioPCMDevice) && !(device = getenv("ALSA_DEVICE"))) {
	device = "default";
    }
    if (!AudioDoingInit) {		// reduce blabla during init
	Info(_("audio/alsa: using %sdevice '%s'\n"),
	    passthrough ? "pass-through " : "", device);
    }
    //
    // for AC3 pass-through try to set the non-audio bit, use AES0=6
    //
    if (passthrough && AudioAppendAES) {
#if 0
	// FIXME: not yet finished
	char *buf;
	const char *s;
	int n;

	n = strlen(device);
	buf = alloca(n + sizeof(":AES0=6") + 1);
	strcpy(buf, device);
	if (!(s = strchr(buf, ':'))) {
	    // no alsa parameters
	    strcpy(buf + n, ":AES=6");
	}
	Debug(3, "audio/alsa: try '%s'\n", buf);
#endif
    }
    // open none blocking; if device is already used, we don't want wait
    if ((err = snd_pcm_open(&handle, device, SND_PCM_STREAM_PLAYBACK,
		SND_PCM_NONBLOCK)) < 0) {
	Error(_("audio/alsa: playback open '%s' error: %s\n"), device,
	    snd_strerror(err));
	return NULL;
    }

    if ((err = snd_pcm_nonblock(handle, 0)) < 0) {
	Error(_("audio/alsa: can't set block mode: %s\n"),
	    snd_strerror(err));
    }
    return handle;
}

/**
**	Initialize alsa pcm device.
**
**	@see AudioPCMDevice
*/
static void AlsaInitPCM(void)
{
    snd_pcm_t *handle;
    snd_pcm_hw_params_t *hw_params;
    int err;

    if (!(handle = AlsaOpenPCM(0))) {
	return;
    }
    // FIXME: pass-through and pcm out can support different features
    snd_pcm_hw_params_alloca(&hw_params);
    // choose all parameters
    if ((err = snd_pcm_hw_params_any(handle, hw_params)) < 0) {
	Error(_
	    ("audio: snd_pcm_hw_params_any: no configurations available: %s\n"),
	    snd_strerror(err));
    }
    AlsaCanPause = snd_pcm_hw_params_can_pause(hw_params);
    Info(_("audio/alsa: supports pause: %s\n"), AlsaCanPause ? "yes" : "no");

    AlsaPCMHandle = handle;
}

//----------------------------------------------------------------------------
//	Alsa Mixer
//----------------------------------------------------------------------------

/**
**	Set alsa mixer volume (0-1000)
**
**	@param volume	volume (0 .. 1000)
*/
static void AlsaSetVolume(int volume)
{
    int v;

    if (AlsaMixer && AlsaMixerElem) {
	v = (volume * AlsaRatio) / (1000 * 1000);
	// set both front channels
	snd_mixer_selem_set_playback_volume(AlsaMixerElem, 0, v);
	snd_mixer_selem_set_playback_volume(AlsaMixerElem, 1, v);
    }
}

/*
**	Initialize alsa mixer.  (function follows below)
*/
*/ static void AlsaInitMixer(void) { const char *device; const char *channel; snd_mixer_t *alsa_mixer; snd_mixer_elem_t *alsa_mixer_elem; long alsa_mixer_elem_min; long alsa_mixer_elem_max; if (!(device = AudioMixerDevice)) { if (!(device = getenv("ALSA_MIXER"))) { device = "default"; } } if (!(channel = AudioMixerChannel)) { if (!(channel = getenv("ALSA_MIXER_CHANNEL"))) { channel = "PCM"; } } Debug(3, "audio/alsa: mixer %s - %s open\n", device, channel); snd_mixer_open(&alsa_mixer, 0); if (alsa_mixer && snd_mixer_attach(alsa_mixer, device) >= 0 && snd_mixer_selem_register(alsa_mixer, NULL, NULL) >= 0 && snd_mixer_load(alsa_mixer) >= 0) { const char *const alsa_mixer_elem_name = channel; alsa_mixer_elem = snd_mixer_first_elem(alsa_mixer); while (alsa_mixer_elem) { const char *name; name = snd_mixer_selem_get_name(alsa_mixer_elem); if (!strcasecmp(name, alsa_mixer_elem_name)) { snd_mixer_selem_get_playback_volume_range(alsa_mixer_elem, &alsa_mixer_elem_min, &alsa_mixer_elem_max); AlsaRatio = 1000 * (alsa_mixer_elem_max - alsa_mixer_elem_min); Debug(3, "audio/alsa: PCM mixer found %ld - %ld ratio %d\n", alsa_mixer_elem_min, alsa_mixer_elem_max, AlsaRatio); break; } alsa_mixer_elem = snd_mixer_elem_next(alsa_mixer_elem); } AlsaMixer = alsa_mixer; AlsaMixerElem = alsa_mixer_elem; } else { Error(_("audio/alsa: can't open mixer '%s'\n"), device); } } //---------------------------------------------------------------------------- // Alsa API //---------------------------------------------------------------------------- /** ** Get alsa audio delay in time-stamps. ** ** @returns audio delay in time-stamps. 
** ** @todo FIXME: handle the case no audio running */ static int64_t AlsaGetDelay(void) { int err; snd_pcm_sframes_t delay; int64_t pts; // setup error if (!AlsaPCMHandle || !AudioRing[AudioRingRead].HwSampleRate) { return 0L; } // delay in frames in alsa + kernel buffers if ((err = snd_pcm_delay(AlsaPCMHandle, &delay)) < 0) { //Debug(3, "audio/alsa: no hw delay\n"); delay = 0L; #ifdef DEBUG } else if (snd_pcm_state(AlsaPCMHandle) != SND_PCM_STATE_RUNNING) { //Debug(3, "audio/alsa: %ld frames delay ok, but not running\n", delay); #endif } //Debug(3, "audio/alsa: %ld frames hw delay\n", delay); // delay can be negative, when underrun occur if (delay < 0) { delay = 0L; } pts = ((int64_t) delay * 90 * 1000) / AudioRing[AudioRingRead].HwSampleRate; return pts; } /** ** Setup alsa audio for requested format. ** ** @param freq sample frequency ** @param channels number of channels ** @param passthrough use pass-through (AC-3, ...) device ** ** @retval 0 everything ok ** @retval 1 didn't support frequency/channels combination ** @retval -1 something gone wrong ** ** @todo FIXME: remove pointer for freq + channels */ static int AlsaSetup(int *freq, int *channels, int passthrough) { snd_pcm_uframes_t buffer_size; snd_pcm_uframes_t period_size; int err; int delay; if (!AlsaPCMHandle) { // alsa not running yet // FIXME: if open fails for fe. 
pass-through, we never recover return -1; } if (!AudioAlsaNoCloseOpen) { // close+open to fix HDMI no sound bug snd_pcm_t *handle; handle = AlsaPCMHandle; // no lock needed, thread exit in main loop only //Debug(3, "audio: %s [\n", __FUNCTION__); AlsaPCMHandle = NULL; // other threads should check handle snd_pcm_close(handle); if (AudioAlsaCloseOpenDelay) { usleep(50 * 1000); // 50ms delay for alsa recovery } // FIXME: can use multiple retries if (!(handle = AlsaOpenPCM(passthrough))) { return -1; } AlsaPCMHandle = handle; //Debug(3, "audio: %s ]\n", __FUNCTION__); } for (;;) { if ((err = snd_pcm_set_params(AlsaPCMHandle, SND_PCM_FORMAT_S16, AlsaUseMmap ? SND_PCM_ACCESS_MMAP_INTERLEAVED : SND_PCM_ACCESS_RW_INTERLEAVED, *channels, *freq, 1, 96 * 1000))) { // try reduced buffer size (needed for sunxi) // FIXME: alternativ make this configurable if ((err = snd_pcm_set_params(AlsaPCMHandle, SND_PCM_FORMAT_S16, AlsaUseMmap ? SND_PCM_ACCESS_MMAP_INTERLEAVED : SND_PCM_ACCESS_RW_INTERLEAVED, *channels, *freq, 1, 72 * 1000))) { /* if ( err == -EBADFD ) { snd_pcm_close(AlsaPCMHandle); AlsaPCMHandle = NULL; continue; } */ if (!AudioDoingInit) { Error(_("audio/alsa: set params error: %s\n"), snd_strerror(err)); } // FIXME: must stop sound, AudioChannels ... invalid return -1; } } break; } // this is disabled, no advantages! 
if (0) { // no underruns allowed, play silence snd_pcm_sw_params_t *sw_params; snd_pcm_uframes_t boundary; snd_pcm_sw_params_alloca(&sw_params); err = snd_pcm_sw_params_current(AlsaPCMHandle, sw_params); if (err < 0) { Error(_("audio: snd_pcm_sw_params_current failed: %s\n"), snd_strerror(err)); } if ((err = snd_pcm_sw_params_get_boundary(sw_params, &boundary)) < 0) { Error(_("audio: snd_pcm_sw_params_get_boundary failed: %s\n"), snd_strerror(err)); } Debug(4, "audio/alsa: boundary %lu frames\n", boundary); if ((err = snd_pcm_sw_params_set_stop_threshold(AlsaPCMHandle, sw_params, boundary)) < 0) { Error(_("audio: snd_pcm_sw_params_set_silence_size failed: %s\n"), snd_strerror(err)); } if ((err = snd_pcm_sw_params_set_silence_size(AlsaPCMHandle, sw_params, boundary)) < 0) { Error(_("audio: snd_pcm_sw_params_set_silence_size failed: %s\n"), snd_strerror(err)); } if ((err = snd_pcm_sw_params(AlsaPCMHandle, sw_params)) < 0) { Error(_("audio: snd_pcm_sw_params failed: %s\n"), snd_strerror(err)); } } // update buffer snd_pcm_get_params(AlsaPCMHandle, &buffer_size, &period_size); Debug(3, "audio/alsa: buffer size %lu %zdms, period size %lu %zdms\n", buffer_size, snd_pcm_frames_to_bytes(AlsaPCMHandle, buffer_size) * 1000 / (*freq * *channels * AudioBytesProSample), period_size, snd_pcm_frames_to_bytes(AlsaPCMHandle, period_size) * 1000 / (*freq * *channels * AudioBytesProSample)); Debug(3, "audio/alsa: state %s\n", snd_pcm_state_name(snd_pcm_state(AlsaPCMHandle))); AudioStartThreshold = snd_pcm_frames_to_bytes(AlsaPCMHandle, period_size); // buffer time/delay in ms delay = AudioBufferTime; if (VideoAudioDelay > 0) { delay += VideoAudioDelay / 90; } if (AudioStartThreshold < (*freq * *channels * AudioBytesProSample * delay) / 1000U) { AudioStartThreshold = (*freq * *channels * AudioBytesProSample * delay) / 1000U; } // no bigger, than 1/3 the buffer if (AudioStartThreshold > AudioRingBufferSize / 3) { AudioStartThreshold = AudioRingBufferSize / 3; } if (!AudioDoingInit) { 
Info(_("audio/alsa: start delay %ums\n"), (AudioStartThreshold * 1000) / (*freq * *channels * AudioBytesProSample)); } return 0; } /** ** Play audio. */ static void AlsaPlay(void) { int err; if (AlsaCanPause) { if ((err = snd_pcm_pause(AlsaPCMHandle, 0))) { Error(_("audio/alsa: snd_pcm_pause(): %s\n"), snd_strerror(err)); } } else { if ((err = snd_pcm_prepare(AlsaPCMHandle)) < 0) { Error(_("audio/alsa: snd_pcm_prepare(): %s\n"), snd_strerror(err)); } } #ifdef DEBUG if (snd_pcm_state(AlsaPCMHandle) == SND_PCM_STATE_PAUSED) { Error(_("audio/alsa: still paused\n")); } #endif } /** ** Pause audio. */ static void AlsaPause(void) { int err; if (AlsaCanPause) { if ((err = snd_pcm_pause(AlsaPCMHandle, 1))) { Error(_("snd_pcm_pause(): %s\n"), snd_strerror(err)); } } else { if ((err = snd_pcm_drop(AlsaPCMHandle)) < 0) { Error(_("snd_pcm_drop(): %s\n"), snd_strerror(err)); } } } /** ** Empty log callback */ static void AlsaNoopCallback( __attribute__ ((unused)) const char *file, __attribute__ ((unused)) int line, __attribute__ ((unused)) const char *function, __attribute__ ((unused)) int err, __attribute__ ((unused)) const char *fmt, ...) { } /** ** Initialize alsa audio output module. */ static void AlsaInit(void) { #ifdef DEBUG (void)AlsaNoopCallback; #else // disable display of alsa error messages snd_lib_error_set_handler(AlsaNoopCallback); #endif AlsaInitPCM(); AlsaInitMixer(); } /** ** Cleanup alsa audio output module. */ static void AlsaExit(void) { if (AlsaPCMHandle) { snd_pcm_close(AlsaPCMHandle); AlsaPCMHandle = NULL; } if (AlsaMixer) { snd_mixer_close(AlsaMixer); AlsaMixer = NULL; AlsaMixerElem = NULL; } } /** ** Alsa module. 
*/ static const AudioModule AlsaModule = { .Name = "alsa", #ifdef USE_AUDIO_THREAD .Thread = AlsaThread, #endif .FlushBuffers = AlsaFlushBuffers, .GetDelay = AlsaGetDelay, .SetVolume = AlsaSetVolume, .Setup = AlsaSetup, .Play = AlsaPlay, .Pause = AlsaPause, .Init = AlsaInit, .Exit = AlsaExit, }; #endif // USE_ALSA #ifdef USE_OSS //============================================================================ // O S S //============================================================================ //---------------------------------------------------------------------------- // OSS variables //---------------------------------------------------------------------------- static int OssPcmFildes = -1; ///< pcm file descriptor static int OssMixerFildes = -1; ///< mixer file descriptor static int OssMixerChannel; ///< mixer channel index static int OssFragmentTime; ///< fragment time in ms //---------------------------------------------------------------------------- // OSS pcm //---------------------------------------------------------------------------- /** ** Play samples from ringbuffer. ** ** @retval 0 ok ** @retval 1 ring buffer empty ** @retval -1 underrun error */ static int OssPlayRingbuffer(void) { int first; first = 1; for (;;) { audio_buf_info bi; const void *p; int n; if (ioctl(OssPcmFildes, SNDCTL_DSP_GETOSPACE, &bi) == -1) { Error(_("audio/oss: ioctl(SNDCTL_DSP_GETOSPACE): %s\n"), strerror(errno)); return -1; } Debug(4, "audio/oss: %d bytes free\n", bi.bytes); n = RingBufferGetReadPointer(AudioRing[AudioRingRead].RingBuffer, &p); if (!n) { // ring buffer empty if (first) { // only error on first loop return 1; } return 0; } if (n < bi.bytes) { // not enough bytes in ring buffer bi.bytes = n; } if (bi.bytes <= 0) { // full or buffer empty break; // bi.bytes could become negative! 
} if (AudioSoftVolume && !AudioRing[AudioRingRead].Passthrough) { // FIXME: quick&dirty cast AudioSoftAmplifier((int16_t *) p, bi.bytes); // FIXME: if not all are written, we double amplify them } for (;;) { n = write(OssPcmFildes, p, bi.bytes); if (n != bi.bytes) { if (n < 0) { if (n == EAGAIN) { continue; } Error(_("audio/oss: write error: %s\n"), strerror(errno)); return 1; } Warning(_("audio/oss: error not all bytes written\n")); } break; } // advance how many could written RingBufferReadAdvance(AudioRing[AudioRingRead].RingBuffer, n); first = 0; } return 0; } /** ** Flush OSS buffers. */ static void OssFlushBuffers(void) { if (OssPcmFildes != -1) { // flush kernel buffers if (ioctl(OssPcmFildes, SNDCTL_DSP_HALT_OUTPUT, NULL) < 0) { Error(_("audio/oss: ioctl(SNDCTL_DSP_HALT_OUTPUT): %s\n"), strerror(errno)); } } } #ifdef USE_AUDIO_THREAD //---------------------------------------------------------------------------- // thread playback //---------------------------------------------------------------------------- /** ** OSS thread ** ** @retval -1 error ** @retval 0 underrun ** @retval 1 running */ static int OssThread(void) { int err; if (!OssPcmFildes) { usleep(OssFragmentTime * 1000); return -1; } for (;;) { struct pollfd fds[1]; if (AudioPaused) { return 1; } // wait for space in kernel buffers fds[0].fd = OssPcmFildes; fds[0].events = POLLOUT | POLLERR; // wait for space in kernel buffers err = poll(fds, 1, OssFragmentTime); if (err < 0) { if (err == EAGAIN) { continue; } Error(_("audio/oss: error poll %s\n"), strerror(errno)); usleep(OssFragmentTime * 1000); return -1; } break; } if (!err || AudioPaused) { // timeout or some commands return 1; } if ((err = OssPlayRingbuffer())) { // empty / error if (err < 0) { // underrun error return -1; } pthread_yield(); usleep(OssFragmentTime * 1000); // let fill/empty the buffers return 0; } return 1; } #endif //---------------------------------------------------------------------------- /** ** Open OSS pcm device. 
**	@param passthrough	use pass-through (AC-3, ...) device
*/
static int OssOpenPCM(int passthrough)
{
    const char *device;
    int fildes;

    // &&|| hell: prefer the pass-through device (config, then env),
    // then the pcm device (config, then env), finally /dev/dsp
    if (!(passthrough && ((device = AudioPassthroughDevice)
		|| (device = getenv("OSS_PASSTHROUGHDEV"))))
	&& !(device = AudioPCMDevice) && !(device = getenv("OSS_AUDIODEV"))) {
	device = "/dev/dsp";
    }
    if (!AudioDoingInit) {
	Info(_("audio/oss: using %sdevice '%s'\n"),
	    passthrough ? "pass-through " : "", device);
    }

    if ((fildes = open(device, O_WRONLY)) < 0) {
	Error(_("audio/oss: can't open dsp device '%s': %s\n"), device,
	    strerror(errno));
	return -1;
    }
    return fildes;
}

/**
**	Initialize OSS pcm device.
**
**	Opens the non-pass-through device; failure leaves
**	OssPcmFildes at -1 (via OssOpenPCM's return value).
**
**	@see AudioPCMDevice
*/
static void OssInitPCM(void)
{
    int fildes;

    fildes = OssOpenPCM(0);

    OssPcmFildes = fildes;
}

//----------------------------------------------------------------------------
//	OSS Mixer
//----------------------------------------------------------------------------

/**
**	Set OSS mixer volume (0-1000)
**
**	@param volume	volume (0 .. 1000)
*/
static void OssSetVolume(int volume)
{
    int v;

    if (OssMixerFildes != -1) {
	// scale 0..1000 to 0..255 and duplicate into both stereo bytes
	v = (volume * 255) / 1000;
	v &= 0xff;
	v = (v << 8) | v;
	if (ioctl(OssMixerFildes, MIXER_WRITE(OssMixerChannel), &v) < 0) {
	    Error(_("audio/oss: ioctl(MIXER_WRITE): %s\n"), strerror(errno));
	}
    }
}

/**
**	Mixer channel name table.
*/
static const char *OssMixerChannelNames[SOUND_MIXER_NRDEVICES] =
    SOUND_DEVICE_NAMES;

/**
**	Initialize OSS mixer.
**
**	Opens the mixer device and looks up the configured channel name
**	in the device mask; on success keeps the fd in OssMixerFildes.
*/
static void OssInitMixer(void)
{
    const char *device;
    const char *channel;
    int fildes;
    int devmask;
    int i;

    if (!(device = AudioMixerDevice)) {
	if (!(device = getenv("OSS_MIXERDEV"))) {
	    device = "/dev/mixer";
	}
    }
    if (!(channel = AudioMixerChannel)) {
	if (!(channel = getenv("OSS_MIXER_CHANNEL"))) {
	    channel = "pcm";
	}
    }
    Debug(3, "audio/oss: mixer %s - %s open\n", device, channel);

    if ((fildes = open(device, O_RDWR)) < 0) {
	Error(_("audio/oss: can't open mixer device '%s': %s\n"), device,
	    strerror(errno));
	return;
    }
    // search channel name
    if (ioctl(fildes, SOUND_MIXER_READ_DEVMASK, &devmask) < 0) {
	Error(_("audio/oss: ioctl(SOUND_MIXER_READ_DEVMASK): %s\n"),
	    strerror(errno));
	close(fildes);
	return;
    }
    for (i = 0; i < SOUND_MIXER_NRDEVICES; ++i) {
	if (!strcasecmp(OssMixerChannelNames[i], channel)) {
	    if (devmask & (1 << i)) {
		OssMixerFildes = fildes;
		OssMixerChannel = i;
		return;
	    }
	    // NOTE(review): after this break the "not found" error below
	    // also fires, so an unsupported channel logs two messages
	    Error(_("audio/oss: channel '%s' not supported\n"), channel);
	    break;
	}
    }
    Error(_("audio/oss: channel '%s' not found\n"), channel);
    close(fildes);
}

//----------------------------------------------------------------------------
//	OSS API
//----------------------------------------------------------------------------

/**
**	Get OSS audio delay in time stamps.
**
**	Converts the bytes still queued in the kernel into 90kHz PTS
**	units using the current hardware rate/channel configuration.
**
**	@returns audio delay in time stamps.
*/
static int64_t OssGetDelay(void)
{
    int delay;
    int64_t pts;

    // setup failure
    if (OssPcmFildes == -1 || !AudioRing[AudioRingRead].HwSampleRate) {
	return 0L;
    }
    if (!AudioRunning) {		// audio not running
	Error(_("audio/oss: should not happen\n"));
	return 0L;
    }
    // delay in bytes in kernel buffers
    delay = -1;
    if (ioctl(OssPcmFildes, SNDCTL_DSP_GETODELAY, &delay) == -1) {
	Error(_("audio/oss: ioctl(SNDCTL_DSP_GETODELAY): %s\n"),
	    strerror(errno));
	return 0L;
    }
    if (delay < 0) {
	delay = 0;
    }

    // bytes -> 90kHz ticks
    pts = ((int64_t) delay * 90 * 1000)
	/ (AudioRing[AudioRingRead].HwSampleRate *
	AudioRing[AudioRingRead].HwChannels * AudioBytesProSample);

    return pts;
}

/**
**	Setup OSS audio for requested format.
** ** @param sample_rate sample rate/frequency ** @param channels number of channels ** @param passthrough use pass-through (AC-3, ...) device ** ** @retval 0 everything ok ** @retval 1 didn't support frequency/channels combination ** @retval -1 something gone wrong */ static int OssSetup(int *sample_rate, int *channels, int passthrough) { int ret; int tmp; int delay; audio_buf_info bi; if (OssPcmFildes == -1) { // OSS not ready // FIXME: if open fails for fe. pass-through, we never recover return -1; } if (1) { // close+open for pcm / AC-3 int fildes; fildes = OssPcmFildes; OssPcmFildes = -1; close(fildes); if (!(fildes = OssOpenPCM(passthrough))) { return -1; } OssPcmFildes = fildes; } ret = 0; tmp = AFMT_S16_NE; // native 16 bits if (ioctl(OssPcmFildes, SNDCTL_DSP_SETFMT, &tmp) == -1) { Error(_("audio/oss: ioctl(SNDCTL_DSP_SETFMT): %s\n"), strerror(errno)); // FIXME: stop player, set setup failed flag return -1; } if (tmp != AFMT_S16_NE) { Error(_("audio/oss: device doesn't support 16 bit sample format.\n")); // FIXME: stop player, set setup failed flag return -1; } tmp = *channels; if (ioctl(OssPcmFildes, SNDCTL_DSP_CHANNELS, &tmp) == -1) { Error(_("audio/oss: ioctl(SNDCTL_DSP_CHANNELS): %s\n"), strerror(errno)); return -1; } if (tmp != *channels) { Warning(_("audio/oss: device doesn't support %d channels.\n"), *channels); *channels = tmp; ret = 1; } tmp = *sample_rate; if (ioctl(OssPcmFildes, SNDCTL_DSP_SPEED, &tmp) == -1) { Error(_("audio/oss: ioctl(SNDCTL_DSP_SPEED): %s\n"), strerror(errno)); return -1; } if (tmp != *sample_rate) { Warning(_("audio/oss: device doesn't support %dHz sample rate.\n"), *sample_rate); *sample_rate = tmp; ret = 1; } #ifdef SNDCTL_DSP_POLICY tmp = 3; if (ioctl(OssPcmFildes, SNDCTL_DSP_POLICY, &tmp) == -1) { Error(_("audio/oss: ioctl(SNDCTL_DSP_POLICY): %s\n"), strerror(errno)); } else { Info("audio/oss: set policy to %d\n", tmp); } #endif if (ioctl(OssPcmFildes, SNDCTL_DSP_GETOSPACE, &bi) == -1) { Error(_("audio/oss: 
ioctl(SNDCTL_DSP_GETOSPACE): %s\n"), strerror(errno)); bi.fragsize = 4096; bi.fragstotal = 16; } else { Debug(3, "audio/oss: %d bytes buffered\n", bi.bytes); } OssFragmentTime = (bi.fragsize * 1000) / (*sample_rate * *channels * AudioBytesProSample); Debug(3, "audio/oss: buffer size %d %dms, fragment size %d %dms\n", bi.fragsize * bi.fragstotal, (bi.fragsize * bi.fragstotal * 1000) / (*sample_rate * *channels * AudioBytesProSample), bi.fragsize, OssFragmentTime); // start when enough bytes for initial write AudioStartThreshold = (bi.fragsize - 1) * bi.fragstotal; // buffer time/delay in ms delay = AudioBufferTime + 300; if (VideoAudioDelay > 0) { delay += VideoAudioDelay / 90; } if (AudioStartThreshold < (*sample_rate * *channels * AudioBytesProSample * delay) / 1000U) { AudioStartThreshold = (*sample_rate * *channels * AudioBytesProSample * delay) / 1000U; } // no bigger, than 1/3 the buffer if (AudioStartThreshold > AudioRingBufferSize / 3) { AudioStartThreshold = AudioRingBufferSize / 3; } if (!AudioDoingInit) { Info(_("audio/oss: delay %ums\n"), (AudioStartThreshold * 1000) / (*sample_rate * *channels * AudioBytesProSample)); } return ret; } /** ** Play audio. */ static void OssPlay(void) { } /** ** Pause audio. */ void OssPause(void) { } /** ** Initialize OSS audio output module. */ static void OssInit(void) { OssInitPCM(); OssInitMixer(); } /** ** Cleanup OSS audio output module. */ static void OssExit(void) { if (OssPcmFildes != -1) { close(OssPcmFildes); OssPcmFildes = -1; } if (OssMixerFildes != -1) { close(OssMixerFildes); OssMixerFildes = -1; } } /** ** OSS module. 
*/
static const AudioModule OssModule = {
    .Name = "oss",
#ifdef USE_AUDIO_THREAD
    .Thread = OssThread,
#endif
    .FlushBuffers = OssFlushBuffers,
    .GetDelay = OssGetDelay,
    .SetVolume = OssSetVolume,
    .Setup = OssSetup,
    .Play = OssPlay,
    .Pause = OssPause,
    .Init = OssInit,
    .Exit = OssExit,
};

#endif // USE_OSS

//============================================================================
//	Noop
//============================================================================

/**
**	Get audio delay in time stamps.  Noop: always zero.
**
**	@returns audio delay in time stamps.
*/
static int64_t NoopGetDelay(void)
{
    return 0L;
}

/**
**	Set mixer volume (0-1000).  Noop: ignored.
**
**	@param volume	volume (0 .. 1000)
*/
static void NoopSetVolume( __attribute__ ((unused))
    int volume)
{
}

/**
**	Noop setup.  Always fails so no format is ever accepted.
**
**	NOTE(review): parameter names are swapped relative to the
**	Setup(freq, channels, ...) convention used by the other modules;
**	harmless since all parameters are unused, but worth aligning.
**
**	@param freq		sample frequency
**	@param channels	number of channels
**	@param passthrough	use pass-through (AC-3, ...) device
*/
static int NoopSetup( __attribute__ ((unused))
    int *channels, __attribute__ ((unused))
    int *freq, __attribute__ ((unused))
    int passthrough)
{
    return -1;
}

/**
**	Noop void.  Shared empty implementation for the remaining hooks.
*/
static void NoopVoid(void)
{
}

/**
**	Noop module.  Used when no real audio backend is available.
*/
static const AudioModule NoopModule = {
    .Name = "noop",
    .FlushBuffers = NoopVoid,
    .GetDelay = NoopGetDelay,
    .SetVolume = NoopSetVolume,
    .Setup = NoopSetup,
    .Play = NoopVoid,
    .Pause = NoopVoid,
    .Init = NoopVoid,
    .Exit = NoopVoid,
};

//----------------------------------------------------------------------------
//	thread playback
//----------------------------------------------------------------------------

#ifdef USE_AUDIO_THREAD

/**
**	Prepare next ring buffer.
**
**	Reconfigures the output module for the format of the new read
**	ring buffer and decides whether playback can continue.
**
**	@retval -1	setup failed
**	@retval 0	enough buffered, keep playing
**	@retval 1	not enough buffered, stop and wait
*/
static int AudioNextRing(void)
{
    int passthrough;
    int sample_rate;
    int channels;
    size_t used;

    // update audio format
    // not always needed, but check if needed is too complex
    passthrough = AudioRing[AudioRingRead].Passthrough;
    sample_rate = AudioRing[AudioRingRead].HwSampleRate;
    channels = AudioRing[AudioRingRead].HwChannels;
    if (AudioUsedModule->Setup(&sample_rate, &channels, passthrough)) {
	Error(_("audio: can't set channels %d sample-rate %dHz\n"), channels,
	    sample_rate);
	// FIXME: handle error
	AudioRing[AudioRingRead].HwSampleRate = 0;
	AudioRing[AudioRingRead].InSampleRate = 0;
	return -1;
    }

    AudioSetVolume(AudioVolume);	// update channel delta
    AudioResetCompressor();
    AudioResetNormalizer();

    Debug(3, "audio: a/v next buf(%d,%4zdms)\n", atomic_read(&AudioRingFilled),
	(RingBufferUsedBytes(AudioRing[AudioRingRead].RingBuffer) * 1000)
	/ (AudioRing[AudioRingWrite].HwSampleRate *
	    AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample));

    // stop, if not enough in next buffer
    used = RingBufferUsedBytes(AudioRing[AudioRingRead].RingBuffer);
    if (AudioStartThreshold * 4 < used || (AudioVideoIsReady
	    && AudioStartThreshold < used)) {
	return 0;
    }
    return 1;
}

/**
**	Audio play thread.
**
**	Sleeps on AudioStartCond until playback is started, then loops:
**	handles queued flush requests, feeds the backend, and switches
**	to the next ring buffer on underrun.
**
**	@param dummy	unused thread argument
*/
static void *AudioPlayHandlerThread(void *dummy)
{
    Debug(3, "audio: play thread started\n");
    for (;;) {
	// check if we should stop the thread
	if (AudioThreadStop) {
	    Debug(3, "audio: play thread stopped\n");
	    return PTHREAD_CANCELED;
	}

	Debug(3, "audio: wait on start condition\n");
	pthread_mutex_lock(&AudioMutex);
	AudioRunning = 0;
	do {
	    pthread_cond_wait(&AudioStartCond, &AudioMutex);
	    // cond_wait can return, without signal!
	} while (!AudioRunning);
	pthread_mutex_unlock(&AudioMutex);

	// the '!x + !y' terms keep the divisor non-zero when the
	// hardware format is not yet configured
	Debug(3, "audio: ----> %dms start\n", (AudioUsedBytes() * 1000)
	    / (!AudioRing[AudioRingWrite].HwSampleRate +
		!AudioRing[AudioRingWrite].HwChannels +
		AudioRing[AudioRingWrite].HwSampleRate *
		AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample));

	do {
	    int filled;
	    int read;
	    int flush;
	    int err;
	    int i;

	    // check if we should stop the thread
	    if (AudioThreadStop) {
		Debug(3, "audio: play thread stopped\n");
		return PTHREAD_CANCELED;
	    }
	    // look if there is a flush command in the queue
	    flush = 0;
	    filled = atomic_read(&AudioRingFilled);
	    read = AudioRingRead;
	    i = filled;
	    while (i--) {
		read = (read + 1) % AUDIO_RING_MAX;
		if (AudioRing[read].FlushBuffers) {
		    AudioRing[read].FlushBuffers = 0;
		    AudioRingRead = read;
		    // handle all flush in queue
		    flush = filled - i;
		}
	    }

	    if (flush) {
		Debug(3, "audio: flush %d ring buffer(s)\n", flush);
		AudioUsedModule->FlushBuffers();
		atomic_sub(flush, &AudioRingFilled);
		if (AudioNextRing()) {
		    Debug(3, "audio: break after flush\n");
		    break;
		}
		Debug(3, "audio: continue after flush\n");
	    }
	    // try to play some samples
	    err = 0;
	    if (RingBufferUsedBytes(AudioRing[AudioRingRead].RingBuffer)) {
		err = AudioUsedModule->Thread();
	    }
	    // underrun, check if new ring buffer is available
	    if (!err) {
		int passthrough;
		int sample_rate;
		int channels;
		int old_passthrough;
		int old_sample_rate;
		int old_channels;

		// underrun, and no new ring buffer, goto sleep.
		if (!atomic_read(&AudioRingFilled)) {
		    break;
		}

		Debug(3, "audio: next ring buffer\n");
		old_passthrough = AudioRing[AudioRingRead].Passthrough;
		old_sample_rate = AudioRing[AudioRingRead].HwSampleRate;
		old_channels = AudioRing[AudioRingRead].HwChannels;

		atomic_dec(&AudioRingFilled);
		AudioRingRead = (AudioRingRead + 1) % AUDIO_RING_MAX;

		passthrough = AudioRing[AudioRingRead].Passthrough;
		sample_rate = AudioRing[AudioRingRead].HwSampleRate;
		channels = AudioRing[AudioRingRead].HwChannels;
		Debug(3, "audio: thread channels %d frequency %dHz %s\n",
		    channels, sample_rate, passthrough ? "pass-through" : "");

		// audio config changed?
		if (old_passthrough != passthrough
		    || old_sample_rate != sample_rate
		    || old_channels != channels) {
		    // FIXME: wait for buffer drain
		    if (AudioNextRing()) {
			break;
		    }
		} else {
		    AudioResetCompressor();
		    AudioResetNormalizer();
		}
	    }
	    // FIXME: check AudioPaused ...Thread()
	    if (AudioPaused) {
		break;
	    }
	} while (AudioRing[AudioRingRead].HwSampleRate);
    }
    return dummy;
}

/**
**	Initialize audio thread.
**
**	NOTE(review): pthread_create/setname return values are unchecked —
**	confirm acceptable for this startup path.
*/
static void AudioInitThread(void)
{
    AudioThreadStop = 0;
    pthread_mutex_init(&AudioMutex, NULL);
    pthread_cond_init(&AudioStartCond, NULL);
    pthread_create(&AudioThread, NULL, AudioPlayHandlerThread, NULL);
    pthread_setname_np(AudioThread, "softhddev audio");
}

/**
**	Cleanup audio thread.  Wakes the thread, joins it, destroys the
**	synchronization primitives.
*/
static void AudioExitThread(void)
{
    void *retval;

    Debug(3, "audio: %s\n", __FUNCTION__);

    if (AudioThread) {
	AudioThreadStop = 1;
	AudioRunning = 1;		// wakeup thread, if needed
	pthread_cond_signal(&AudioStartCond);
	if (pthread_join(AudioThread, &retval) || retval != PTHREAD_CANCELED) {
	    Error(_("audio: can't cancel play thread\n"));
	}
	pthread_cond_destroy(&AudioStartCond);
	pthread_mutex_destroy(&AudioMutex);
	AudioThread = 0;
    }
}

#endif

//----------------------------------------------------------------------------
//----------------------------------------------------------------------------

/**
**	Table of all audio modules.
*/
static const AudioModule *AudioModules[] = {
#ifdef USE_ALSA
    &AlsaModule,
#endif
#ifdef USE_OSS
    &OssModule,
#endif
    &NoopModule,			// always last: guaranteed fallback
};

/**
**	Place samples in audio output queue.
**
**	Applies optional software processing (resample/compress/
**	normalize), writes into the current write ring buffer, starts
**	the play thread when enough data is buffered, and advances the
**	write-side PTS.
**
**	@param samples	sample buffer
**	@param count	number of bytes in sample buffer
*/
void AudioEnqueue(const void *samples, int count)
{
    size_t n;
    int16_t *buffer;

#ifdef noDEBUG
    static uint32_t last_tick;
    uint32_t tick;

    tick = GetMsTicks();
    if (tick - last_tick > 101) {
	Debug(3, "audio: enqueue %4d %dms\n", count, tick - last_tick);
    }
    last_tick = tick;
#endif

    if (!AudioRing[AudioRingWrite].HwSampleRate) {
	Debug(3, "audio: enqueue not ready\n");
	return;				// no setup yet
    }
    // save packet size
    if (!AudioRing[AudioRingWrite].PacketSize) {
	AudioRing[AudioRingWrite].PacketSize = count;
	Debug(3, "audio: a/v packet size %d bytes\n", count);
    }
    // audio sample modification allowed and needed?
    buffer = (void *)samples;
    if (!AudioRing[AudioRingWrite].Passthrough && (AudioCompression
	    || AudioNormalize
	    || AudioRing[AudioRingWrite].InChannels !=
	    AudioRing[AudioRingWrite].HwChannels)) {
	int frames;

	// resample into ring-buffer is too complex in the case of a roundabout
	// just use a temporary buffer
	// NOTE(review): alloca size scales with count — assumes callers
	// enqueue bounded packets; confirm upper bound
	frames =
	    count / (AudioRing[AudioRingWrite].InChannels *
	    AudioBytesProSample);
	buffer =
	    alloca(frames * AudioRing[AudioRingWrite].HwChannels *
	    AudioBytesProSample);
#ifdef USE_AUDIO_MIXER
	// Convert / resample input to hardware format
	AudioResample(samples, AudioRing[AudioRingWrite].InChannels, frames,
	    buffer, AudioRing[AudioRingWrite].HwChannels);
#else
#ifdef DEBUG
	if (AudioRing[AudioRingWrite].InChannels !=
	    AudioRing[AudioRingWrite].HwChannels) {
	    Debug(3, "audio: internal failure channels mismatch\n");
	    return;
	}
#endif
	memcpy(buffer, samples, count);
#endif
	count =
	    frames * AudioRing[AudioRingWrite].HwChannels *
	    AudioBytesProSample;

	if (AudioCompression) {		// in place operation
	    AudioCompressor(buffer, count);
	}
	if (AudioNormalize) {		// in place operation
	    AudioNormalizer(buffer, count);
	}
    }

    n = RingBufferWrite(AudioRing[AudioRingWrite].RingBuffer, buffer, count);
    if (n != (size_t) count) {
	Error(_("audio: can't place %d samples in ring buffer\n"), count);
	// too many bytes are lost
	// FIXME: caller checks buffer full.
	// FIXME: should skip more, longer skip, but less often?
	// FIXME: round to channel + sample border
    }

    if (!AudioRunning) {		// check, if we can start the thread
	int skip;

	n = RingBufferUsedBytes(AudioRing[AudioRingWrite].RingBuffer);
	skip = AudioSkip;
	// FIXME: round to packet size

	Debug(3, "audio: start? %4zdms skip %dms\n", (n * 1000)
	    / (AudioRing[AudioRingWrite].HwSampleRate *
		AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample),
	    (skip * 1000)
	    / (AudioRing[AudioRingWrite].HwSampleRate *
		AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample));

	if (skip) {
	    if (n < (unsigned)skip) {
		skip = n;
	    }
	    AudioSkip -= skip;
	    RingBufferReadAdvance(AudioRing[AudioRingWrite].RingBuffer, skip);
	    n = RingBufferUsedBytes(AudioRing[AudioRingWrite].RingBuffer);
	}
	// forced start or enough video + audio buffered
	// for some exotic channels * 4 too small
	if (AudioStartThreshold * 4 < n || (AudioVideoIsReady
		&& AudioStartThreshold < n)) {
	    // restart play-back
	    // no lock needed, can wakeup next time
	    AudioRunning = 1;
	    pthread_cond_signal(&AudioStartCond);
	}
    }
    // Update audio clock (stupid gcc developers thinks INT64_C is unsigned)
    if (AudioRing[AudioRingWrite].PTS != (int64_t) INT64_C(0x8000000000000000)) {
	AudioRing[AudioRingWrite].PTS += ((int64_t) count * 90 * 1000)
	    / (AudioRing[AudioRingWrite].HwSampleRate *
	    AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample);
    }
}

/**
**	Video is ready.
**
**	@param pts	video presentation timestamp
*/
void AudioVideoReady(int64_t pts)
{
    int64_t audio_pts;
    size_t used;

    // 0x8000000000000000 is the "no PTS" sentinel used throughout
    if (pts == (int64_t) INT64_C(0x8000000000000000)) {
	Debug(3, "audio: a/v start, no valid video\n");
	return;
    }
    // no valid audio known
    if (!AudioRing[AudioRingWrite].HwSampleRate
	|| !AudioRing[AudioRingWrite].HwChannels
	|| AudioRing[AudioRingWrite].PTS ==
	(int64_t) INT64_C(0x8000000000000000)) {
	Debug(3, "audio: a/v start, no valid audio\n");
	AudioVideoIsReady = 1;
	return;
    }

    // Audio.PTS = next written sample time stamp
    used = RingBufferUsedBytes(AudioRing[AudioRingWrite].RingBuffer);
    audio_pts =
	AudioRing[AudioRingWrite].PTS -
	(used * 90 * 1000) / (AudioRing[AudioRingWrite].HwSampleRate *
	AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample);

    Debug(3, "audio: a/v sync buf(%d,%4zdms) %s|%s = %dms %s\n",
	atomic_read(&AudioRingFilled),
	(used * 1000) / (AudioRing[AudioRingWrite].HwSampleRate *
	    AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample),
	Timestamp2String(pts), Timestamp2String(audio_pts),
	(int)(pts - audio_pts) / 90, AudioRunning ? "running" : "ready");

    if (!AudioRunning) {
	int skip;

	// buffer ~15 video frames
	// FIXME: HDTV can use smaller video buffer
	skip =
	    pts - 15 * 20 * 90 - AudioBufferTime * 90 - audio_pts +
	    VideoAudioDelay;
#ifdef DEBUG
	fprintf(stderr, "%dms %dms %dms\n", (int)(pts - audio_pts) / 90,
	    VideoAudioDelay / 90, skip / 90);
#endif
	// guard against old PTS
	if (skip > 0 && skip < 2000 * 90) {
	    // 90kHz ticks -> bytes at the hardware format
	    skip = (((int64_t) skip * AudioRing[AudioRingWrite].HwSampleRate)
		/ (1000 * 90))
		* AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample;
	    // FIXME: round to packet size
	    if ((unsigned)skip > used) {
		AudioSkip = skip - used;	// remember rest for AudioEnqueue
		skip = used;
	    }
	    Debug(3, "audio: sync advance %dms %d/%zd\n",
		(skip * 1000) / (AudioRing[AudioRingWrite].HwSampleRate *
		    AudioRing[AudioRingWrite].HwChannels *
		    AudioBytesProSample), skip, used);
	    RingBufferReadAdvance(AudioRing[AudioRingWrite].RingBuffer, skip);

	    used = RingBufferUsedBytes(AudioRing[AudioRingWrite].RingBuffer);
	}
	// FIXME: skip<0 we need bigger audio buffer

	// enough video + audio buffered
	if (AudioStartThreshold < used) {
	    AudioRunning = 1;
	    pthread_cond_signal(&AudioStartCond);
	}
    }

    AudioVideoIsReady = 1;
#if 0
    // NOTE(review): dead (#if 0) earlier implementation kept for reference
    if (AudioRing[AudioRingWrite].HwSampleRate
	&& AudioRing[AudioRingWrite].HwChannels) {
	if (pts != (int64_t) INT64_C(0x8000000000000000)
	    && AudioRing[AudioRingWrite].PTS !=
	    (int64_t) INT64_C(0x8000000000000000)) {
	    Debug(3, "audio: a/v %d %s\n",
		(int)(pts - AudioRing[AudioRingWrite].PTS) / 90,
		AudioRunning ? "running" : "stopped");
	}
	Debug(3, "audio: start %4zdms %s|%s video ready\n",
	    (RingBufferUsedBytes(AudioRing[AudioRingWrite].RingBuffer) * 1000)
	    / (AudioRing[AudioRingWrite].HwSampleRate *
		AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample),
	    Timestamp2String(pts),
	    Timestamp2String(AudioRing[AudioRingWrite].PTS));

	if (!AudioRunning) {
	    size_t used;

	    used = RingBufferUsedBytes(AudioRing[AudioRingWrite].RingBuffer);
	    // enough video + audio buffered
	    if (AudioStartThreshold < used) {
		// too much audio buffered, skip it
		if (AudioStartThreshold < used) {
		    Debug(3, "audio: start %4zdms skip video ready\n",
			((used - AudioStartThreshold) * 1000)
			/ (AudioRing[AudioRingWrite].HwSampleRate *
			    AudioRing[AudioRingWrite].HwChannels *
			    AudioBytesProSample));
		    RingBufferReadAdvance(AudioRing
			[AudioRingWrite].RingBuffer,
			used - AudioStartThreshold);
		}
		AudioRunning = 1;
		pthread_cond_signal(&AudioStartCond);
	    }
	}
    }
    AudioVideoIsReady = 1;
#endif
}

/**
**	Flush audio buffers.
**
**	Queues a flush request in the next write ring buffer and waits
**	(bounded) for the play thread to act on it.
*/
void AudioFlushBuffers(void)
{
    int old;
    int i;

    if (atomic_read(&AudioRingFilled) >= AUDIO_RING_MAX) {
	// wait for space in ring buffer, should never happen
	for (i = 0; i < 24 * 2; ++i) {
	    if (atomic_read(&AudioRingFilled) < AUDIO_RING_MAX) {
		break;
	    }
	    Debug(3, "audio: flush out of ring buffers\n");
	    usleep(1 * 1000);		// avoid hot polling
	}
	if (atomic_read(&AudioRingFilled) >= AUDIO_RING_MAX) {
	    // FIXME: We can set the flush flag in the last wrote ring buffer
	    Error(_("audio: flush out of ring buffers\n"));
	    return;
	}
    }

    old = AudioRingWrite;
    AudioRingWrite = (AudioRingWrite + 1) % AUDIO_RING_MAX;
    AudioRing[AudioRingWrite].FlushBuffers = 1;
    // carry the current format over into the new write buffer
    AudioRing[AudioRingWrite].Passthrough = AudioRing[old].Passthrough;
    AudioRing[AudioRingWrite].HwSampleRate = AudioRing[old].HwSampleRate;
    AudioRing[AudioRingWrite].HwChannels = AudioRing[old].HwChannels;
    AudioRing[AudioRingWrite].InSampleRate = AudioRing[old].InSampleRate;
    AudioRing[AudioRingWrite].InChannels = AudioRing[old].InChannels;
    AudioRing[AudioRingWrite].PTS = INT64_C(0x8000000000000000);
    RingBufferReadAdvance(AudioRing[AudioRingWrite].RingBuffer,
	RingBufferUsedBytes(AudioRing[AudioRingWrite].RingBuffer));
    Debug(3, "audio: reset video ready\n");
    AudioVideoIsReady = 0;
    AudioSkip = 0;

    atomic_inc(&AudioRingFilled);

    // FIXME: wait for flush complete needed?
    for (i = 0; i < 24 * 2; ++i) {
	if (!AudioRunning) {		// wakeup thread to flush buffers
	    AudioRunning = 1;
	    pthread_cond_signal(&AudioStartCond);
	}
	// FIXME: waiting on zero isn't correct, but currently works
	if (!atomic_read(&AudioRingFilled)) {
	    break;
	}
	usleep(1 * 1000);		// avoid hot polling
    }
    Debug(3, "audio: audio flush %dms\n", i);
}

/**
**	Call back to play audio polled.
*/
void AudioPoller(void)
{
    // FIXME: write poller
}

/**
**	Get free bytes in audio output.
*/
int AudioFreeBytes(void)
{
    return AudioRing[AudioRingWrite].RingBuffer
	? RingBufferFreeBytes(AudioRing[AudioRingWrite].RingBuffer)
	: INT32_MAX;			// no ring buffer: pretend unlimited
}

/**
**	Get used bytes in audio output.
*/
int AudioUsedBytes(void)
{
    // FIXME: not correct, if multiple buffer are in use
    return AudioRing[AudioRingWrite].RingBuffer
	? RingBufferUsedBytes(AudioRing[AudioRingWrite].RingBuffer) : 0;
}

/**
**	Get audio delay in time stamps.
**
**	Backend delay plus whatever is still queued in the read ring
**	buffer, in 90kHz ticks.
**
**	@returns audio delay in time stamps.
*/
int64_t AudioGetDelay(void)
{
    int64_t pts;

    if (!AudioRunning) {
	return 0L;			// audio not running
    }
    if (!AudioRing[AudioRingRead].HwSampleRate) {
	return 0L;			// audio not setup
    }
    if (atomic_read(&AudioRingFilled)) {
	return 0L;			// multiple buffers, invalid delay
    }
    pts = AudioUsedModule->GetDelay();
    pts += ((int64_t) RingBufferUsedBytes(AudioRing[AudioRingRead].RingBuffer)
	* 90 * 1000) / (AudioRing[AudioRingRead].HwSampleRate *
	AudioRing[AudioRingRead].HwChannels * AudioBytesProSample);
    Debug(4, "audio: hw+sw delay %zd %" PRId64 "ms\n",
	RingBufferUsedBytes(AudioRing[AudioRingRead].RingBuffer), pts / 90);

    return pts;
}

/**
**	Set audio clock base.
** ** @param pts audio presentation timestamp */ void AudioSetClock(int64_t pts) { if (AudioRing[AudioRingWrite].PTS != pts) { Debug(4, "audio: set clock %s -> %s pts\n", Timestamp2String(AudioRing[AudioRingWrite].PTS), Timestamp2String(pts)); } AudioRing[AudioRingWrite].PTS = pts; } /** ** Get current audio clock. ** ** @returns the audio clock in time stamps. */ int64_t AudioGetClock(void) { // (cast) needed for the evil gcc if (AudioRing[AudioRingRead].PTS != (int64_t) INT64_C(0x8000000000000000)) { int64_t delay; // delay zero, if no valid time stamp if ((delay = AudioGetDelay())) { if (AudioRing[AudioRingRead].Passthrough) { return AudioRing[AudioRingRead].PTS + 0 * 90 - delay; } return AudioRing[AudioRingRead].PTS + 0 * 90 - delay; } } return INT64_C(0x8000000000000000); } /** ** Set mixer volume (0-1000) ** ** @param volume volume (0 .. 1000) */ void AudioSetVolume(int volume) { AudioVolume = volume; AudioMute = !volume; // reduce loudness for stereo output if (AudioStereoDescent && AudioRing[AudioRingRead].InChannels == 2 && !AudioRing[AudioRingRead].Passthrough) { volume -= AudioStereoDescent; if (volume < 0) { volume = 0; } else if (volume > 1000) { volume = 1000; } } AudioAmplifier = volume; if (!AudioSoftVolume) { AudioUsedModule->SetVolume(volume); } } /** ** Setup audio for requested format. ** ** @param freq sample frequency ** @param channels number of channels ** @param passthrough use pass-through (AC-3, ...) device ** ** @retval 0 everything ok ** @retval 1 didn't support frequency/channels combination ** @retval -1 something gone wrong ** ** @todo add support to report best fitting format. */ int AudioSetup(int *freq, int *channels, int passthrough) { Debug(3, "audio: setup channels %d frequency %dHz %s\n", *channels, *freq, passthrough ? 
"pass-through" : ""); // invalid parameter if (!freq || !channels || !*freq || !*channels) { Debug(3, "audio: bad channels or frequency parameters\n"); // FIXME: set flag invalid setup return -1; } return AudioRingAdd(*freq, *channels, passthrough); } /** ** Play audio. */ void AudioPlay(void) { if (!AudioPaused) { Debug(3, "audio: not paused, check the code\n"); return; } Debug(3, "audio: resumed\n"); AudioPaused = 0; AudioEnqueue(NULL, 0); // wakeup thread } /** ** Pause audio. */ void AudioPause(void) { if (AudioPaused) { Debug(3, "audio: already paused, check the code\n"); return; } Debug(3, "audio: paused\n"); AudioPaused = 1; } /** ** Set audio buffer time. ** ** PES audio packets have a max distance of 300 ms. ** TS audio packet have a max distance of 100 ms. ** The period size of the audio buffer is 24 ms. ** With streamdev sometimes extra +100ms are needed. */ void AudioSetBufferTime(int delay) { if (!delay) { delay = 336; } AudioBufferTime = delay; } /** ** Enable/disable software volume. ** ** @param onoff -1 toggle, true turn on, false turn off */ void AudioSetSoftvol(int onoff) { if (onoff < 0) { AudioSoftVolume ^= 1; } else { AudioSoftVolume = onoff; } } /** ** Set normalize volume parameters. ** ** @param onoff -1 toggle, true turn on, false turn off ** @param maxfac max. factor of normalize /1000 */ void AudioSetNormalize(int onoff, int maxfac) { if (onoff < 0) { AudioNormalize ^= 1; } else { AudioNormalize = onoff; } AudioMaxNormalize = maxfac; } /** ** Set volume compression parameters. ** ** @param onoff -1 toggle, true turn on, false turn off ** @param maxfac max. 
factor of compression /1000 */ void AudioSetCompression(int onoff, int maxfac) { if (onoff < 0) { AudioCompression ^= 1; } else { AudioCompression = onoff; } AudioMaxCompression = maxfac; if (!AudioCompressionFactor) { AudioCompressionFactor = 1000; } if (AudioCompressionFactor > AudioMaxCompression) { AudioCompressionFactor = AudioMaxCompression; } } /** ** Set stereo loudness descent. ** ** @param delta value (/1000) to reduce stereo volume */ void AudioSetStereoDescent(int delta) { AudioStereoDescent = delta; AudioSetVolume(AudioVolume); // update channel delta } /** ** Set pcm audio device. ** ** @param device name of pcm device (fe. "hw:0,9" or "/dev/dsp") ** ** @note this is currently used to select alsa/OSS output module. */ void AudioSetDevice(const char *device) { if (!AudioModuleName) { AudioModuleName = "alsa"; // detect alsa/OSS if (!device[0]) { AudioModuleName = "noop"; } else if (device[0] == '/') { AudioModuleName = "oss"; } } AudioPCMDevice = device; } /** ** Set pass-through audio device. ** ** @param device name of pass-through device (fe. "hw:0,1") ** ** @note this is currently usable with alsa only. */ void AudioSetPassthroughDevice(const char *device) { if (!AudioModuleName) { AudioModuleName = "alsa"; // detect alsa/OSS if (!device[0]) { AudioModuleName = "noop"; } else if (device[0] == '/') { AudioModuleName = "oss"; } } AudioPassthroughDevice = device; } /** ** Set pcm audio mixer channel. ** ** @param channel name of the mixer channel (fe. PCM or Master) ** ** @note this is currently used to select alsa/OSS output module. */ void AudioSetChannel(const char *channel) { AudioMixerChannel = channel; } /** ** Set automatic AES flag handling. ** ** @param onoff turn setting AES flag on or off */ void AudioSetAutoAES(int onoff) { if (onoff < 0) { AudioAppendAES ^= 1; } else { AudioAppendAES = onoff; } } /** ** Initialize audio output module. ** ** @todo FIXME: make audio output module selectable. 
*/
void AudioInit(void)
{
    unsigned u;
    const char *name;
    int freq;
    int chan;

    // default module priority: noop < oss < alsa, explicit name wins
    name = "noop";
#ifdef USE_OSS
    name = "oss";
#endif
#ifdef USE_ALSA
    name = "alsa";
#endif
    if (AudioModuleName) {
	name = AudioModuleName;
    }
    //
    //	search selected audio module.
    //
    for (u = 0; u < sizeof(AudioModules) / sizeof(*AudioModules); ++u) {
	if (!strcasecmp(name, AudioModules[u]->Name)) {
	    AudioUsedModule = AudioModules[u];
	    Info(_("audio: '%s' output module used\n"), AudioUsedModule->Name);
	    goto found;
	}
    }
    Error(_("audio: '%s' output module isn't supported\n"), name);
    AudioUsedModule = &NoopModule;
    return;

  found:
    AudioDoingInit = 1;
    AudioRingInit();
    AudioUsedModule->Init();
    //
    //	Check which channels/rates/formats are supported
    //	FIXME: we force 44.1Khz and 48Khz must be supported equal
    //	FIXME: should use bitmap of channels supported in RatesInHw
    //	FIXME: use loop over sample-rates
    freq = 44100;
    AudioRatesInHw[Audio44100] = 0;
    for (chan = 1; chan < 9; ++chan) {
	int tchan;
	int tfreq;

	tchan = chan;
	tfreq = freq;
	if (AudioUsedModule->Setup(&tfreq, &tchan, 0)) {
	    AudioChannelsInHw[chan] = 0;
	} else {
	    AudioChannelsInHw[chan] = chan;
	    AudioRatesInHw[Audio44100] |= (1 << chan);
	}
    }
    freq = 48000;
    AudioRatesInHw[Audio48000] = 0;
    for (chan = 1; chan < 9; ++chan) {
	int tchan;
	int tfreq;

	// only probe channel counts that 44.1kHz already supported
	if (!AudioChannelsInHw[chan]) {
	    continue;
	}
	tchan = chan;
	tfreq = freq;
	if (AudioUsedModule->Setup(&tfreq, &tchan, 0)) {
	    //AudioChannelsInHw[chan] = 0;
	} else {
	    AudioChannelsInHw[chan] = chan;
	    AudioRatesInHw[Audio48000] |= (1 << chan);
	}
    }
    freq = 192000;
    AudioRatesInHw[Audio192000] = 0;
    for (chan = 1; chan < 9; ++chan) {
	int tchan;
	int tfreq;

	if (!AudioChannelsInHw[chan]) {
	    continue;
	}
	tchan = chan;
	tfreq = freq;
	if (AudioUsedModule->Setup(&tfreq, &tchan, 0)) {
	    //AudioChannelsInHw[chan] = 0;
	} else {
	    AudioChannelsInHw[chan] = chan;
	    AudioRatesInHw[Audio192000] |= (1 << chan);
	}
    }
    //	build channel support and conversion table
    for (u = 0; u < AudioRatesMax; ++u) {
	for (chan = 1; chan < 9; ++chan) {
	    AudioChannelMatrix[u][chan] = 0;
	    if (!AudioRatesInHw[u]) {	// rate unsupported
		continue;
	    }
	    if (AudioChannelsInHw[chan]) {
		AudioChannelMatrix[u][chan] = chan;
	    } else {
		// no exact match: upmix toward the next supported count;
		// the case chain below falls through DELIBERATELY so an
		// unsupported target cascades to the next larger one
		switch (chan) {
		    case 1:
			if (AudioChannelsInHw[2]) {
			    AudioChannelMatrix[u][chan] = 2;
			}
			break;
		    case 2:
		    case 3:
			if (AudioChannelsInHw[4]) {
			    AudioChannelMatrix[u][chan] = 4;
			    break;
			}
			// fallthrough
		    case 4:
			if (AudioChannelsInHw[5]) {
			    AudioChannelMatrix[u][chan] = 5;
			    break;
			}
			// fallthrough
		    case 5:
			if (AudioChannelsInHw[6]) {
			    AudioChannelMatrix[u][chan] = 6;
			    break;
			}
			// fallthrough
		    case 6:
			if (AudioChannelsInHw[7]) {
			    AudioChannelMatrix[u][chan] = 7;
			    break;
			}
			// fallthrough
		    case 7:
			if (AudioChannelsInHw[8]) {
			    AudioChannelMatrix[u][chan] = 8;
			    break;
			}
			// fallthrough
		    case 8:
			// downmix: prefer 5.1, then stereo, then mono
			if (AudioChannelsInHw[6]) {
			    AudioChannelMatrix[u][chan] = 6;
			    break;
			}
			if (AudioChannelsInHw[2]) {
			    AudioChannelMatrix[u][chan] = 2;
			    break;
			}
			if (AudioChannelsInHw[1]) {
			    AudioChannelMatrix[u][chan] = 1;
			    break;
			}
			break;
		}
	    }
	}
    }
    for (u = 0; u < AudioRatesMax; ++u) {
	Info(_("audio: %6dHz supports %d %d %d %d %d %d %d %d channels\n"),
	    AudioRatesTable[u], AudioChannelMatrix[u][1],
	    AudioChannelMatrix[u][2], AudioChannelMatrix[u][3],
	    AudioChannelMatrix[u][4], AudioChannelMatrix[u][5],
	    AudioChannelMatrix[u][6], AudioChannelMatrix[u][7],
	    AudioChannelMatrix[u][8]);
    }
#ifdef USE_AUDIO_THREAD
    if (AudioUsedModule->Thread) {	// supports threads
	AudioInitThread();
    }
#endif
    AudioDoingInit = 0;
}

/**
**	Cleanup audio output module.
*/ void AudioExit(void) { const AudioModule *module; Debug(3, "audio: %s\n", __FUNCTION__); #ifdef USE_AUDIO_THREAD if (AudioUsedModule->Thread) { // supports threads AudioExitThread(); } #endif module = AudioUsedModule; AudioUsedModule = &NoopModule; module->Exit(); AudioRingExit(); AudioRunning = 0; AudioPaused = 0; } #ifdef AUDIO_TEST //---------------------------------------------------------------------------- // Test //---------------------------------------------------------------------------- void AudioTest(void) { for (;;) { unsigned u; uint8_t buffer[16 * 1024]; // some random data int i; for (u = 0; u < sizeof(buffer); u++) { buffer[u] = random() & 0xffff; } Debug(3, "audio/test: loop\n"); for (i = 0; i < 100; ++i) { while (RingBufferFreeBytes(AlsaRingBuffer) > sizeof(buffer)) { AlsaEnqueue(buffer, sizeof(buffer)); } usleep(20 * 1000); } break; } } #include int SysLogLevel; ///< show additional debug informations /** ** Print version. */ static void PrintVersion(void) { printf("audio_test: audio tester Version " VERSION #ifdef GIT_REV "(GIT-" GIT_REV ")" #endif ",\n\t(c) 2009 - 2013 by Johns\n" "\tLicense AGPLv3: GNU Affero General Public License version 3\n"); } /** ** Print usage. */ static void PrintUsage(void) { printf("Usage: audio_test [-?dhv]\n" "\t-d\tenable debug, more -d increase the verbosity\n" "\t-? -h\tdisplay this message\n" "\t-v\tdisplay version information\n" "Only idiots print usage on stderr!\n"); } /** ** Main entry point. ** ** @param argc number of arguments ** @param argv arguments vector ** ** @returns -1 on failures, 0 clean exit. 
*/ int main(int argc, char *const argv[]) { SysLogLevel = 0; // // Parse command line arguments // for (;;) { switch (getopt(argc, argv, "hv?-c:d")) { case 'd': // enabled debug ++SysLogLevel; continue; case EOF: break; case 'v': // print version PrintVersion(); return 0; case '?': case 'h': // help usage PrintVersion(); PrintUsage(); return 0; case '-': PrintVersion(); PrintUsage(); fprintf(stderr, "\nWe need no long options\n"); return -1; case ':': PrintVersion(); fprintf(stderr, "Missing argument for option '%c'\n", optopt); return -1; default: PrintVersion(); fprintf(stderr, "Unknown option '%c'\n", optopt); return -1; } break; } if (optind < argc) { PrintVersion(); while (optind < argc) { fprintf(stderr, "Unhandled argument '%s'\n", argv[optind++]); } return -1; } // // main loop // AudioInit(); for (;;) { unsigned u; uint8_t buffer[16 * 1024]; // some random data for (u = 0; u < sizeof(buffer); u++) { buffer[u] = random() & 0xffff; } Debug(3, "audio/test: loop\n"); for (;;) { while (RingBufferFreeBytes(AlsaRingBuffer) > sizeof(buffer)) { AlsaEnqueue(buffer, sizeof(buffer)); } } } AudioExit(); return 0; } #endif vdr-plugin-softhddevice/.gitignore0000644000175000017500000000020212644034136017123 0ustar tobiastobias# gitignore(5) file *.[oa] *~ .*.swp .gdb_history # work directory .chaos # generated files .dependencies libvdr-softhddevice.so* vdr-plugin-softhddevice/video.h0000644000175000017500000001475112644034136016430 0ustar tobiastobias/// /// @file video.h @brief Video module header file /// /// Copyright (c) 2009 - 2015 by Johns. All Rights Reserved. /// /// Contributor(s): /// /// License: AGPLv3 /// /// This program is free software: you can redistribute it and/or modify /// it under the terms of the GNU Affero General Public License as /// published by the Free Software Foundation, either version 3 of the /// License. 
/// /// This program is distributed in the hope that it will be useful, /// but WITHOUT ANY WARRANTY; without even the implied warranty of /// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the /// GNU Affero General Public License for more details. /// /// $Id: 7b676db0db3e7773e753356b9a94a1a369ce20bb $ ////////////////////////////////////////////////////////////////////////////// /// @addtogroup Video /// @{ //---------------------------------------------------------------------------- // Typedefs //---------------------------------------------------------------------------- /// Video hardware decoder typedef typedef struct _video_hw_decoder_ VideoHwDecoder; /// Video output stream typedef typedef struct __video_stream__ VideoStream; //---------------------------------------------------------------------------- // Variables //---------------------------------------------------------------------------- extern signed char VideoHardwareDecoder; ///< flag use hardware decoder extern char VideoIgnoreRepeatPict; ///< disable repeat pict warning extern int VideoAudioDelay; ///< audio/video delay extern char ConfigStartX11Server; ///< flag start the x11 server //---------------------------------------------------------------------------- // Prototypes //---------------------------------------------------------------------------- /// Allocate new video hardware decoder. extern VideoHwDecoder *VideoNewHwDecoder(VideoStream *); /// Deallocate video hardware decoder. extern void VideoDelHwDecoder(VideoHwDecoder *); #ifdef LIBAVCODEC_VERSION /// Get and allocate a video hardware surface. extern unsigned VideoGetSurface(VideoHwDecoder *, const AVCodecContext *); /// Release a video hardware surface extern void VideoReleaseSurface(VideoHwDecoder *, unsigned); /// Callback to negotiate the PixelFormat. extern enum PixelFormat Video_get_format(VideoHwDecoder *, AVCodecContext *, const enum PixelFormat *); /// Render a ffmpeg frame. 
extern void VideoRenderFrame(VideoHwDecoder *, const AVCodecContext *, const AVFrame *); /// Get hwaccel context for ffmpeg. extern void *VideoGetHwAccelContext(VideoHwDecoder *); #ifdef AVCODEC_VDPAU_H /// Draw vdpau render state. extern void VideoDrawRenderState(VideoHwDecoder *, struct vdpau_render_state *); #endif #endif /// Poll video events. extern void VideoPollEvent(void); /// Wakeup display handler. extern void VideoDisplayWakeup(void); /// Set video device. extern void VideoSetDevice(const char *); /// Get video driver name. extern const char *VideoGetDriverName(void); /// Set video geometry. extern int VideoSetGeometry(const char *); /// Set 60Hz display mode. extern void VideoSet60HzMode(int); /// Set soft start audio/video sync. extern void VideoSetSoftStartSync(int); /// Set show black picture during channel switch. extern void VideoSetBlackPicture(int); /// Set brightness adjustment. extern void VideoSetBrightness(int); /// Set contrast adjustment. extern void VideoSetContrast(int); /// Set saturation adjustment. extern void VideoSetSaturation(int); /// Set hue adjustment. extern void VideoSetHue(int); /// Set video output position. extern void VideoSetOutputPosition(VideoHwDecoder *, int, int, int, int); /// Set video mode. extern void VideoSetVideoMode(int, int, int, int); /// Set 4:3 display format. extern void VideoSet4to3DisplayFormat(int); /// Set other display format. extern void VideoSetOtherDisplayFormat(int); /// Set video fullscreen mode. extern void VideoSetFullscreen(int); /// Set deinterlace. extern void VideoSetDeinterlace(int[]); /// Set skip chroma deinterlace. extern void VideoSetSkipChromaDeinterlace(int[]); /// Set inverse telecine. extern void VideoSetInverseTelecine(int[]); /// Set scaling. extern void VideoSetScaling(int[]); /// Set denoise. extern void VideoSetDenoise(int[]); /// Set sharpen. extern void VideoSetSharpen(int[]); /// Set cut top and bottom. extern void VideoSetCutTopBottom(int[]); /// Set cut left and right. 
extern void VideoSetCutLeftRight(int[]); /// Set studio levels. extern void VideoSetStudioLevels(int); /// Set background. extern void VideoSetBackground(uint32_t); /// Set audio delay. extern void VideoSetAudioDelay(int); /// Set auto-crop parameters. extern void VideoSetAutoCrop(int, int, int); /// Clear OSD. extern void VideoOsdClear(void); /// Draw an OSD ARGB image. extern void VideoOsdDrawARGB(int, int, int, int, int, const uint8_t *, int, int); /// Get OSD size. extern void VideoGetOsdSize(int *, int *); /// Set OSD size. extern void VideoSetOsdSize(int, int); /// Set Osd 3D Mode extern void VideoSetOsd3DMode(int); /// Set video clock. extern void VideoSetClock(VideoHwDecoder *, int64_t); /// Get video clock. extern int64_t VideoGetClock(const VideoHwDecoder *); /// Set closing flag. extern void VideoSetClosing(VideoHwDecoder *); /// Reset start of frame counter extern void VideoResetStart(VideoHwDecoder *); /// Set trick play speed. extern void VideoSetTrickSpeed(VideoHwDecoder *, int); /// Grab screen. extern uint8_t *VideoGrab(int *, int *, int *, int); /// Grab screen raw. extern uint8_t *VideoGrabService(int *, int *, int *); /// Get decoder statistics. extern void VideoGetStats(VideoHwDecoder *, int *, int *, int *, int *); /// Get video stream size extern void VideoGetVideoSize(VideoHwDecoder *, int *, int *, int *, int *); extern void VideoOsdInit(void); ///< Setup osd. extern void VideoOsdExit(void); ///< Cleanup osd. extern void VideoInit(const char *); ///< Setup video module. extern void VideoExit(void); ///< Cleanup and exit video module. /// Poll video input buffers. extern int VideoPollInput(VideoStream *); /// Decode video input buffers. extern int VideoDecodeInput(VideoStream *); /// Get number of input buffers. 
extern int VideoGetBuffers(const VideoStream *); /// Set DPMS at Blackscreen switch extern void SetDPMSatBlackScreen(int); /// Raise the frontend window extern int VideoRaiseWindow(void); /// @} vdr-plugin-softhddevice/.gitattributes0000644000175000017500000000013112644034136020027 0ustar tobiastobias# gitattributes(5) file *.[15] ident *.[ch] ident *.cpp ident *.txt ident Makefile ident vdr-plugin-softhddevice/misc.h0000644000175000017500000000734412644034136016255 0ustar tobiastobias/// /// @file misc.h @brief Misc function header file /// /// Copyright (c) 2009 - 2012 by Lutz Sammer. All Rights Reserved. /// /// Contributor(s): /// Copied from uwm. /// /// License: AGPLv3 /// /// This program is free software: you can redistribute it and/or modify /// it under the terms of the GNU Affero General Public License as /// published by the Free Software Foundation, either version 3 of the /// License. /// /// This program is distributed in the hope that it will be useful, /// but WITHOUT ANY WARRANTY; without even the implied warranty of /// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the /// GNU Affero General Public License for more details. 
/// /// $Id: f5ff4b300aa33eb721d658c0c9374c8499b67318 $ ////////////////////////////////////////////////////////////////////////////// /// @addtogroup misc /// @{ #include #include #include // clock_gettime ////////////////////////////////////////////////////////////////////////////// // Defines ////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////// // Declares ////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////// // Variables ////////////////////////////////////////////////////////////////////////////// extern int SysLogLevel; ///< how much information wanted ////////////////////////////////////////////////////////////////////////////// // Prototypes ////////////////////////////////////////////////////////////////////////////// static inline void Syslog(const int, const char *format, ...) __attribute__ ((format(printf, 2, 3))); ////////////////////////////////////////////////////////////////////////////// // Inlines ////////////////////////////////////////////////////////////////////////////// #ifdef DEBUG #define DebugLevel 4 /// private debug level #else #define DebugLevel 0 /// private debug level #endif /** ** Syslog output function. ** ** - 0 fatal errors and errors ** - 1 warnings ** - 2 info ** - 3 important debug and fixme's */ static inline void Syslog(const int level, const char *format, ...) { if (SysLogLevel > level || DebugLevel > level) { va_list ap; va_start(ap, format); vsyslog(LOG_ERR, format, ap); va_end(ap); } } /** ** Show error. */ #define Error(fmt...) Syslog(0, fmt) /** ** Show fatal error. */ #define Fatal(fmt...) do { Error(fmt); abort(); } while (0) /** ** Show warning. */ #define Warning(fmt...) Syslog(1, fmt) /** ** Show info. */ #define Info(fmt...) Syslog(2, fmt) /** ** Show debug. */ #ifdef DEBUG #define Debug(level, fmt...) 
Syslog(level, fmt) #else #define Debug(level, fmt...) /* disabled */ #endif #ifndef AV_NOPTS_VALUE #define AV_NOPTS_VALUE INT64_C(0x8000000000000000) #endif /** ** Nice time-stamp string. ** ** @param ts dvb time stamp */ static inline const char *Timestamp2String(int64_t ts) { static char buf[4][16]; static int idx; if (ts == (int64_t) AV_NOPTS_VALUE) { return "--:--:--.---"; } idx = (idx + 1) % 3; snprintf(buf[idx], sizeof(buf[idx]), "%2d:%02d:%02d.%03d", (int)(ts / (90 * 3600000)), (int)((ts / (90 * 60000)) % 60), (int)((ts / (90 * 1000)) % 60), (int)((ts / 90) % 1000)); return buf[idx]; } /** ** Get ticks in ms. ** ** @returns ticks in ms, */ static inline uint32_t GetMsTicks(void) { #ifdef CLOCK_MONOTONIC struct timespec tspec; clock_gettime(CLOCK_MONOTONIC, &tspec); return (tspec.tv_sec * 1000) + (tspec.tv_nsec / (1000 * 1000)); #else struct timeval tval; if (gettimeofday(&tval, NULL) < 0) { return 0; } return (tval.tv_sec * 1000) + (tval.tv_usec / 1000); #endif } /// @} vdr-plugin-softhddevice/softhddev.h0000644000175000017500000000745412644034136017312 0ustar tobiastobias/// /// @file softhddev.h @brief software HD device plugin header file. /// /// Copyright (c) 2011 - 2015 by Johns. All Rights Reserved. /// /// Contributor(s): /// /// License: AGPLv3 /// /// This program is free software: you can redistribute it and/or modify /// it under the terms of the GNU Affero General Public License as /// published by the Free Software Foundation, either version 3 of the /// License. /// /// This program is distributed in the hope that it will be useful, /// but WITHOUT ANY WARRANTY; without even the implied warranty of /// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the /// GNU Affero General Public License for more details. 
/// /// $Id: efb2e251dd7082138ec21609478d2402be8208cf $ ////////////////////////////////////////////////////////////////////////////// #ifdef __cplusplus extern "C" { #endif /// C callback feed key press extern void FeedKeyPress(const char *, const char *, int, int, const char *); /// C plugin get osd size and ascpect extern void GetOsdSize(int *, int *, double *); /// C plugin close osd extern void OsdClose(void); /// C plugin draw osd pixmap extern void OsdDrawARGB(int, int, int, int, int, const uint8_t *, int, int); /// C plugin play audio packet extern int PlayAudio(const uint8_t *, int, uint8_t); /// C plugin play TS audio packet extern int PlayTsAudio(const uint8_t *, int); /// C plugin set audio volume extern void SetVolumeDevice(int); /// C plugin reset channel id (restarts audio) extern void ResetChannelId(void); /// C plugin play video packet extern int PlayVideo(const uint8_t *, int); /// C plugin play TS video packet extern void PlayTsVideo(const uint8_t *, int); /// C plugin grab an image extern uint8_t *GrabImage(int *, int, int, int, int); /// C plugin set play mode extern int SetPlayMode(int); /// C plugin get current system time counter extern int64_t GetSTC(void); /// C plugin get video stream size and aspect extern void GetVideoSize(int *, int *, double *); /// C plugin set trick speed extern void TrickSpeed(int); /// C plugin clears all video and audio data from the device extern void Clear(void); /// C plugin sets the device into play mode extern void Play(void); /// C plugin sets the device into "freeze frame" mode extern void Freeze(void); /// C plugin mute audio extern void Mute(void); /// C plugin display I-frame as a still picture. 
extern void StillPicture(const uint8_t *, int); /// C plugin poll if ready extern int Poll(int); /// C plugin flush output buffers extern int Flush(int); /// C plugin command line help extern const char *CommandLineHelp(void); /// C plugin process the command line arguments extern int ProcessArgs(int, char *const[]); /// C plugin exit + cleanup extern void SoftHdDeviceExit(void); /// C plugin start code extern int Start(void); /// C plugin stop code extern void Stop(void); /// C plugin house keeping extern void Housekeeping(void); /// C plugin main thread hook extern void MainThreadHook(void); /// Suspend plugin extern void Suspend(int, int, int); /// Resume plugin extern void Resume(void); /// Get decoder statistics extern void GetStats(int *, int *, int *, int *); /// C plugin scale video extern void ScaleVideo(int, int, int, int); /// Set Pip position extern void PipSetPosition(int, int, int, int, int, int, int, int); /// Pip start extern void PipStart(int, int, int, int, int, int, int, int); /// Pip stop extern void PipStop(void); /// Pip play video packet extern int PipPlayVideo(const uint8_t *, int); extern const char *X11DisplayName; ///< x11 display name #ifdef __cplusplus } #endif vdr-plugin-softhddevice/audio.h0000644000175000017500000000547012644034136016421 0ustar tobiastobias/// /// @file audio.h @brief Audio module headerfile /// /// Copyright (c) 2009 - 2014 by Johns. All Rights Reserved. /// /// Contributor(s): /// /// License: AGPLv3 /// /// This program is free software: you can redistribute it and/or modify /// it under the terms of the GNU Affero General Public License as /// published by the Free Software Foundation, either version 3 of the /// License. /// /// This program is distributed in the hope that it will be useful, /// but WITHOUT ANY WARRANTY; without even the implied warranty of /// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the /// GNU Affero General Public License for more details. 
/// /// $Id: da055758af70cfdb9ab1989d5fcfb218c5d6a366 $ ////////////////////////////////////////////////////////////////////////////// /// @addtogroup Audio /// @{ //---------------------------------------------------------------------------- // Prototypes //---------------------------------------------------------------------------- extern void AudioEnqueue(const void *, int); ///< buffer audio samples extern void AudioFlushBuffers(void); ///< flush audio buffers extern void AudioPoller(void); ///< poll audio events/handling extern int AudioFreeBytes(void); ///< free bytes in audio output extern int AudioUsedBytes(void); ///< used bytes in audio output extern int64_t AudioGetDelay(void); ///< get current audio delay extern void AudioSetClock(int64_t); ///< set audio clock base extern int64_t AudioGetClock(); ///< get current audio clock extern void AudioSetVolume(int); ///< set volume extern int AudioSetup(int *, int *, int); ///< setup audio output extern void AudioPlay(void); ///< play audio extern void AudioPause(void); ///< pause audio extern void AudioSetBufferTime(int); ///< set audio buffer time extern void AudioSetSoftvol(int); ///< enable/disable softvol extern void AudioSetNormalize(int, int); ///< set normalize parameters extern void AudioSetCompression(int, int); ///< set compression parameters extern void AudioSetStereoDescent(int); ///< set stereo loudness descent extern void AudioSetDevice(const char *); ///< set PCM audio device /// set pass-through device extern void AudioSetPassthroughDevice(const char *); extern void AudioSetChannel(const char *); ///< set mixer channel extern void AudioSetAutoAES(int); ///< set automatic AES flag handling extern void AudioInit(void); ///< setup audio module extern void AudioExit(void); ///< cleanup and exit audio module //---------------------------------------------------------------------------- // Variables //---------------------------------------------------------------------------- extern char 
AudioAlsaDriverBroken; ///< disable broken driver message extern char AudioAlsaNoCloseOpen; ///< disable alsa close/open fix extern char AudioAlsaCloseOpenDelay; ///< enable alsa close/open delay fix /// @} vdr-plugin-softhddevice/ChangeLog0000644000175000017500000005152612644034136016724 0ustar tobiastobiasUser johns Date: Preparations for new ffmpeg VDPAU API. Added VDPAU multi decoder loop changes to VA-API code. Reenabled VA-API auto detection. Check and enforce USE_PIP is defined, for new code. Fix comment spelling. Disabled old code before removement. Handle change of audio ac3 downmix direct. Speedup queuing output surface, when decoder buffers are full. Fix bug: info shows wrong decoded video surfaces. Calculate queued output surfaces and show them in info message. Add support for new API of vdr 2.3.1. Fix bug: EnableDPMSatBlackScreen only available with USE_SCREENSAVER. - H264_EOS_TRICKSPEED and USE_MPEG_COMPLETE enabled as default. User master_red Date: Mon Aug 10 15:29:33 CEST 2015 Configurable enable DPMS, while black screen is displayed. User johns Date: Tue Jun 30 10:12:09 CET 2015 Fix bug: wrong and crash, if vdr draws pixmaps outside OSD. Fix bug: wrong version number check for av_frame_alloc(), ... Workaround for ffmpeg 2.6 artifacts. Fix bug: brightness and .. are calculated wrong. Add automatic frame rate detection for older ffmpeg versions. Fix bug: destroyed vdpau surfaces still used in queue. Fix bug: need signed char, if compiler has unsigned chars. Try smaller audio puffer, if default size fails. Fix bug: center cut-out didn't use cut off pixels. Fix bug #2058: support for Make.plgcfg. Fix for compile with vdr 2.1.10, for older vdr versions. User jinx Date: Mon Feb 16 09:58:06 CET 2015 Enable toggle AC3 downmix. User johns Date: Thu Feb 12 10:30:50 CET 2015 Compile with vdr 2.1.10. Fix bug: AVCodecContext.framerate not supported. Use video stream frame rate for A/V sync. 
User Antti Seppälä Date: Thu Oct 16 14:15:15 CEST 2014 Corrected black surface for va-api. User johns Date: Thu Oct 16 14:05:17 CEST 2014 Newer va-api intel drivers support PutImage. Use more portable fork for vfork. Fix crash with VA-API vdpau backend. User mini73 Date: Sat Oct 11 16:53:18 CEST 2014 Fix bug: random rubbish at the end of letter. User johns Date: Tue Sep 23 12:36:39 CEST 2014 Fix audio thread close race condition. Support ffmpeg new AVFrame API in the audio codec. Config for automatic AES parameters. Use GCC built-in functions for atomic operations. User master_red Date: Wed Jun 4 14:44:32 CEST 2014 Support detach or suspend in plugin menu. User johns Date: Fri May 30 10:18:20 CEST 2014 Fix "make clean-plugins". Fix compile with newer libav. Fix OSD bugs. Add some VA-API VPP info outputs. Remove build files for old unstable VDR. User hd.brummy Date: Thu Jan 30 10:40:49 CET 2014 Update gentoo ebuild. User johns Date: Thu Jan 30 10:36:53 CET 2014 Fix spelling in arguments help. Add Workaround for alsa blocking audio device. Improves thread handling for audio flush and close. User mini73 Date: Fri Jan 24 11:30:49 CET 2014 Fix bug: learing x11 remote keys fails. Add support for umlauts in input fields. User johns Date: Tue Jan 14 14:59:44 CET 2014 Fix alternative OSD support with VDPAU bitmap surfaces. Fix compile error with VDR 2.1.3. Fix bug: memory leak. PIP close clears the last used PIP channel. Fix bug: -DOSD_DEBUG uses old (deleted) variable. Fix bug: Option softhddevice.BlackPicture has no effect. User Dr. Seltsam Date: Tue Nov 5 16:46:34 CET 2013 Add support to configure and clear buffers on channel switch. User johns Date: Tue Oct 8 10:18:04 CET 2013 CLOCK_REALTIME -> CLOCK_MONOTONIC to allow time changes. Add function VideoStreamOpen and always use VideoStreamClose. Softer audio/video sync. Add function GetStats to the video output module. Add function ResetStart to the video output module. Add function SetClosing to the video output module. 
Generalize GetVaapiContext to GetHwAccelContext. Add compile time configurable trickspeed packets dump. Fix bug #1410: wrong spelled AC-3 and E-AC-3. Add compile time selectable h264 trickspeed workaround. Use ffmpeg new names AVCodecID, AV_CODEC_... . Fix bug: video lagging behind after recording stop. Reduce PES error messages. Fix bug #1392: Wrong value for mixing LFE. Fix bug: wrong grab size, introduced with AMD VDPAU. Use VDR SPU decoder as default. Fix bug: grab image negative quality isn't the default 100. Support AMD VDPAU with surface size != requested size. Add cache for auto-crop buffer. Fix opengl and opengl threads bugs. Initial opengl support with va-api only. Fix "broken driver" message if empty ring buffer. Enable seamless audio track change. Fix bug #1302: Unsupported pixel format crash. Fix the fix, when sillpicture is called in suspend mode. Fix crash, when sillpicture is called in suspend mode. Add workaround for zero width+height and ffmpeg >= 1.2. User johns Date: Sun Mar 17 15:52:42 CET 2013 Release Version 0.6.0 Adds H264 only hardware decoder for still-pictures. Enable optional VDR-SPU deocder support. User anbr Date: Sun Mar 17 15:49:46 CET 2013 Update german translation. User cyril Date: Wed Mar 6 17:05:10 CET 2013 Adds raise softhddevice video window support. User johns Date: Wed Mar 6 10:30:27 CET 2013 Adds optional only complete mpeg packets support. Fixes text of EAC-3 pass-through setup. Try to start or connect to X11 server with -xx. Try to use HBR (High Bit-Rate) for EAC3. Improved pass-through (PCM+EAC3) support. Support VDR 1.7.36 new build system. Improves VDPAU display preemption handling. Add modifiers to X11 remote key names (from Sibbi). Add compatibility with >=ffmpeg 1.1. Adds PIP (Picture-in-Picture) support. Split mpeg packets in receiver thread. User horchi Date: Tue Jan 1 17:58:54 CET 2013 Adds VDR SeduAtmo Plugin support. User johns Date: Tue Jan 1 15:21:28 CET 2013 Support multiple streams with ScaleVideo. 
Makes 4:3 and 16:9 display format configurable. Don't use DVB display format. User Zoolook Date: Tue Jan 1 12:49:19 CET 2013 Add support for new vdr ScaleVideo API. User johns Date: Tue Jan 1 12:40:12 CET 2013 Add support for old PES HDTV recording. Disable trickspeed hack, to prevent ffmpeg crash. Makes X11 server arguments configurable. Add german translation. User FireFly Date: Sun Nov 18 21:15:50 CET 2012 Add german translation. User johns Date: Thu Nov 15 22:28:55 CET 2012 Release Version 0.5.2 User maverick-me Date: Mon Nov 5 23:13:42 CET 2012 Fix 3d OSD position. User maverick-me Date: Tue Oct 30 16:50:25 CET 2012 Add support to change the OSD for 3d SBS/TB streams. User johns Date: Tue Oct 30 12:11:25 CEST 2012 Use software decoder for still-pictures. Add Feature #1103: change audio devices without vdr restart. Fix bug #1089: Vdpau decoder used wrong number of mpeg reference frames. Fix bug: with some streams endless loop in pes audio parser. Report correct video size in cSoftHdDevice::GetVideoSize. Add picture adjustment support for vdpau. Revert "mpeg_vdpau" back to "mpegvideo_vdpau". Fix bug: Can't use software decoder with VDPAU. Resume plugin, if suspend control stops. Removes old audio code (!USE_AUDIORING). Use -DOSD_DEBUG to debug OSD. User arttupii Date: Tue Aug 7 16:46:23 2012 +0200 Fix bug #909: Subtitles destroy menu. Fix bug #1003: Subtitles overlapping. User johns Date: Fri Jul 27 19:15:48 CEST 2012 Free used X11 resources colormap, pixmap, cursor. Fix bug: spelling USE_VAPI wrong, missing functions. User johns Date: Tue Jul 3 16:35:46 CEST 2012 Release Version 0.5.1 Add commandline support to disable hardware decoder. Display frame statistics in plugin menu. Fix bug: 100% CPU use during playback. Fix bug: audio use 100% CPU during pause. Guard audio skip against old PTS values. Improved audio skip, after channel switch. Setup add preview of collapsed tree. Fix bug: dvd plugin times out. Makes OSD size configurable. 
Support display with non-square pixels. Initial ADTS (AAC+) support. Fix bug #959: log flooded with service messages. User durchflieger Date: Mon Apr 30 14:46:51 CEST 2012 Adds VDR DFAtmo Plugin support. User johns Date: Mon Apr 30 13:56:26 CEST 2012 Fix bug: don't normalize or compress pass-through samples. Make audio ring buffer size a multiple of 3,5,7,8. Add reset ring buffer support. Fix bug: alloca wrong size for audio buffer. Handle jump in stream like stream start. Always compile audio drift correction. Add audio drift correction configuration to the setup. User mini73 Date: Fri Apr 20 16:51:14 CEST 2012 Add support for svdr command "stat". User johns Date: Fri Apr 20 16:05:40 CEST 2012 Made showing black picture configurable. Show black picture, if no video stream is available. Setup split into foldable sections. Adds show cursor on pointer move and hide after 200ms. Adds Hot-key support for auto-crop enable/disable/toggle. Adds detached start mode. Fix bug: VDPAU looses preemption callback. Fix bug: X11 server keeps sending USR1 signals, which confuses suspend. Show message for hot-keys. Fix bug: playback errors with old PES recordings. Adds Hot-key support for 4:3 zoom modes. User johns Date: Sat Apr 7 20:21:16 CEST 2012 Release Version 0.5.0 Change audio/video delay with hot-key. Enable/disable/toggle fullscreen with hot-key (Feature #930). User: CafeDelMar Date: Thu Apr 5 22:44:06 CEST 2012 Cutting pixels are now configured for each resolution. User johns Date: Thu Apr 5 15:47:59 CEST 2012 Buffer less video and audio. Fix 100% cpu use, with mp3 plugin. Audio/Video sync rewrite, trick-speed support moved to video. Faster VdpauBlackSurface version. Fix bug: VideoSetPts wrong position for multi frame packets. User: CafeDelMar Date: Mon Mar 26 20:45:54 CEST 2012 Add VideoSkipPixels support. User johns Date: Fri Mar 23 18:43:20 CET 2012 Add optional argument (display) to ATTA svdrp commmand. Wakeup display to show OSD for remote learning mode. 
Support switching the primary device with svdrp. Disable and reenable screen saver and DPMS. Video source code cleanup. Fix fast backward with some h264 streams. Make soft start sync setup menu configurable. Fix bug: StillPicture NAL end of sequence is 10 and not 0x10. Fix bug: AudioEnqueue crash without sound card. User johns Date: Sun Mar 4 22:35:36 CET 2012 Release Version 0.4.9 Experimental ac3 audio drift correction support. Removes LPCM detection from TS parser. Rewrote video/audio start code. Add support for attach/detach plugin. OSS needs bigger audio buffers. Improved audio drift correction support. Experimental audio drift correction support. Add SVDRP HOTK command support. Increased audio buffer time for PES packets. Support configuration and set of video background. Survive lost X11 display. Fix bug: 100% cpu use with plugins like mp3. Wakeup display thread on channel switch, osd can now be shown without video. Makes 60Hz display mode configurable with setup.conf. Support downmix of AC-3 to stero. New audio PES packet parser. Fix bug: Grabbing a JPG image fails while suspended. Add support for hot keys. Add support to use characters input in edit mode. Adds trick speed support. User johns Date: Thu Feb 16 09:59:14 CET 2012 Release Version 0.4.8 Fix bug: wrong start of video packet. VDPAU: Enables inverse telecine configuration. Find AC3 (Dolby Digital) inside PES packet. Fix bug: audio increments invalid audio PTS. Fix bug: dvd plugin not working. Fix bug: used frame-> instead of video_ctx-> for old libav/ffmpeg. User johns Date: Mon Feb 13 23:20:26 CET 2012 Release Version 0.4.7 User FireFly Date: Mon Feb 13 20:14:11 CET 2012 Fix bug: unscaled jpeg includes PNG header. User johns Date: Mon Feb 13 14:58:26 CET 2012 VDPAU: Studio levels could be configured in the setup menu. Window size defaults to fullscreen, if no geometry is given. User m.Rcu Date: Sun Feb 12 20:28:22 CET 2012 Jpeg screengrab use VDR RgbToJpeg function. 
User johns Date: Sun Feb 12 20:14:43 CET 2012 Add play/pause audio support. Fix bug: audible glitch when switching AC-3 pass-through <-> none. Fix bug: mpeg stills not displayed. Detect audio stream type only after stream switch. Detect more h264 streams with leading zeros. VDPAU: support for studio levels added. Add support for skip chroma deinterlace to software deinterlacer. Type of software deinterlacer now configurable from setup menu. Mixer channel could be set through command line option. Fix bug: LFE moved to wrong position. Guard suspend/resume against multiple calls. Add support for AAC LATM audio streams. Fix bug: alsa and ffmpeg use different channel layout. Support more LPCM sample rates and number of channels. Quick&dirty support for mpeg LPCM streams. Workaround for text2skin undrawn OSD areas. Detect dvb LPCM stream and ignore it. User johns Date: Thu Feb 2 23:29:35 CET 2012 Release Version 0.4.6 Warn only on the first duplicated frame in sequence. Increase audio buffer, if bigger audio delay is used. Makes SkipLines configure in setup menu. Auto-crop only enabled with normal 4:3 display mode. Vaapi updates OSD when cropping changes. Add A-V info output and compile time option. Fix bug: VA-API intel software decoder broken by aspect commit. Add support for 4:3 output modes. Quicker auto-crop after channel switch. Add auto-crop support for Intel VA-API backend. Fix bug: Auto-Crop logo skip didn't use displayed width. Workaround for mpeg2 FFMpeg + VA-API + Intel GPU hung. Fix bug: Missing vaSyncSurface and vaDestroyImage. Fix bug: Only black picture with VA-API hw decoder. User HelAu Date: Mon Jan 30 16:54:47 CET 2012 Add support to start the plugin in suspended mode. User johns Date: Mon Jan 30 15:58:21 CET 2012 Finished rewrite of video code, to support output modules. Add aspect change support to software decoder path. Repair software decoder with vaapi vdpau backend. Add workaround for Intel VA-API MPEG GPU hung. 
User johns Date: Sat Jan 28 13:32:12 CET 2012 Release Version 0.4.5 Add configurable skip lines at video top and bottom. Add auto-crop tolerance configuration. Reduces audio latency, increases audio buffer time. Made video_test working again. Disabled VA-API Intel vaAssociateSubpicture workaround. Fix bug: Must release lock for VideoPollEvent. Allow faster video and audio sync. Fix bug: Software decoder use vaPutImage with intel backend. Fix bug: Artefacts are shown after mpeg2 channel switch. Fix bug: VideoReleaseSurface called after VideoExit. Support external players. Add VDPAU display preemption support. User m.Rcu Date: Tue Jan 24 22:38:30 CET 2012 Add support for grab jpeg image. User johns Date: Tue Jan 24 22:25:33 CET 2012 Fix bug: VaapiOsdExit doesn't deassociate osd surface. Fix bug: First OSD can show random pixels. Wait for X11 exit and kill it, if not. Fix still picture handling. Fix for dead-lock in VdpauExit. Workaround for dead-lock in VdpauExit. VDPAU: Add very primitive software scaler for grab image. VA-API: Add auto-crop support. Suspend can close/open X11 window, connection and audio device. User Morone Date: Sun Jan 22 16:43:23 CET 2012 Use different alsa devices for AC3/pass-through and pcm. User johns Date: Sun Jan 22 11:12:57 CET 2012 Add dummy player and control for suspend mode. Buffertime compile time configurable in ms. Date: Sat Jan 21 15:49:16 CET 2012 Release Version 0.4.0 VDPAU: Add grab image support. VDPAU: Add auto-crop support. VDPAU: Changed OSD alpha calculation. Fix bug: Used VideoSharpen for denoise settings. Instant update deinterlace/... configuration changes. Fix bug: AudioExit called without AudioInit crash. Date: Thu Jan 19 15:58:40 CET 2012 Release Version 0.3.5 OSD improvements: Use OSD size equal to video window. Update only dirty area(s) of OSD. Show/mix only used area of OSD. Fix bug: vpdau use previous resolution for deint, ... Fix software deinterlace with VA-API. Fix bug: transposed digits 567 should be 576. 
Audio module cleanup: Alsa + OSS can be included/build at the same time. Alsa or OSS can be runtime selected with -a. Add audio thread support to OSS module. Add polled audio support to alsa module. Removed some debug source code. Date: Sun Jan 15 16:56:04 CET 2012 Release Version 0.3.1 Fix bug: AudioFreeBytes didn't check if audio running/compiled. Fix bug: snd_pcm_state: Assertion `pcm' failed. Add support for fullscreen and fullscreen toogle. Instant update deinterlace configuration changes. Fix subtitle position. Add SVDRP support. Suspend when user is inactive. User Christian Rupper Date: Tue Jan 10 22:33:14 CET 2012 Move objects before $LIBS to avoid link failures with --as-needed. Do not override CFLAGS for video test. Rearrange *FLAGS incl. some minor fixes. Don't override VDRDIR, LIBDIR and TMPDIR in makefile. Don't abuse LDFLAGS in makefile. Define CC in makefile. Include GL/gl.h for the GL_COLOR_BUFFER_BIT definition. VideoInit() needs an argument. User johns Date: Tue Jan 10 22:32:50 CET 2012 Add main menu entry, which suspends the plugin. Add support for resize window. Close window sends "close" as remote key press. Date: Mon Jan 9 22:09:38 CET 2012 Release Version 0.3.0 Add support of resolution dependend video parameters (deint, scale, ...). Add support for recording play back. Add workaround for alsa crash in snd_pcm_prepare. Fix bug: audio crash on exit. Fix build with vdr without yaepg support. Support yaepghd video picture output position change. Date: Sat Jan 7 13:20:07 CET 2012 Release Version 0.2.0 Add support for ac3 audio pass through. Add workaround for alsa not playing hdmi sound. Fix bug: broken device plugin stop and exit. Show transparent cursor to hide cursor. VDPAU: Add color standard support. VDPAU: Add denoise and sharpness support. VDPAU: Add skip chroma deinterlace support. VDPAU: Show OSD only if something is to display, improves performance. VDPAU: Add deinterlace with only 4 surfaces. 
Date: Thu Jan 4 17:00:00 CET 2012 Release Version 0.1.5 Adds OSS mixer support. Fix bug: audio new stream is not thread safe. New audio driver OSS. Fix bug: needed down sampling of 3/5/6 to 2 channels not reported. Search audio sync inside PES packets, for insane dvb streams. Use only the needed number of surfaces. Date: Thu Dec 29 19:44:43 CET 2011 Release Version 0.1.4 Prepared vdpau noise reduction support. Vdpau also displays a black surface, when no video is available. Fix bug: CodecVideoDecode destroys avpkt. Date: Thu Dec 29 00:55:57 CET 2011 Release Version 0.1.3 Add missing VdpauDecoderDestroy. Cleanup video packet ringbuffer. Allow build without VDPAU. Fix bug: swapped end and start. Support other than "PCM" alsa mixer channels. Date: Sat Dec 24 15:26:27 CET 2011 Release Version 0.1.2 Fix wrong decoder->SurfaceField again. Remove interlaced_frame debug, it can't be used. Fix new video stream never resets, if buffers full. Date: Fri Dec 23 21:31:27 CET 2011 Release Version 0.1.1 Initial VDPAU decoder support. Initial VDPAU output support. Configurable audio delay. Make pts monotonic. Support old libav and ffmpeg libs. Support xcb_icccm_set_wm_protocols with xcb-util <0.3.8. New video/audio sync code. Support xcb-util <0.3.8. Use current configuration for setup menu. Initial support of replay. Workaround for libva-driver-intel 1080i problems. DisplayFrame displays now only a single frame. Add deinterlace/scaling modes to setup. Date: Sat Dec 10 00:06:46 CET 2011 Release Version 0.0.9 Pass audio/video PTS to codec. Fix libva-driver-intel OSD problems. Add audio resample support. Reduce program exit crashes. Add libva-driver-vdpau autodetection. Add workaround for bugs in libva-driver-vdpau. Threaded video display handler written. Add support for unscaled osd (f.e. supported by intel-vaapi) Add support for 16:9 and 4:3 video streams. Add buildin X11 server start. Remove ffmpeg deprecated functions. Disable display of window cursor. 
Can force self as primary device. Smaller audio buffer. Don't trust ffmpeg or stream interlace flag. Fix Makefile. Add setting analog volume. Date: Thu Dec 1 20:48:35 CET 2011 Release Version 0.0.5 vdr-plugin-softhddevice/ringbuffer.c0000644000175000017500000001647412644034136017452 0ustar tobiastobias/// /// @file ringbuffer.c @brief Ringbuffer module /// /// Copyright (c) 2009, 2011, 2014 by Johns. All Rights Reserved. /// /// Contributor(s): /// /// License: AGPLv3 /// /// This program is free software: you can redistribute it and/or modify /// it under the terms of the GNU Affero General Public License as /// published by the Free Software Foundation, either version 3 of the /// License. /// /// This program is distributed in the hope that it will be useful, /// but WITHOUT ANY WARRANTY; without even the implied warranty of /// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the /// GNU Affero General Public License for more details. /// /// $Id: c9497b197ce7e7a6ba397944edc7ccb161152efd $ ////////////////////////////////////////////////////////////////////////////// /// /// @defgroup Ringbuffer The ring buffer module. /// /// Lock free ring buffer with only one writer and one reader. /// #include #include #include #include "iatomic.h" #include "ringbuffer.h" /// ring buffer structure struct _ring_buffer_ { char *Buffer; ///< ring buffer data const char *BufferEnd; ///< end of buffer size_t Size; ///< bytes in buffer (for faster calc) const char *ReadPointer; ///< only used by reader char *WritePointer; ///< only used by writer /// The only thing modified by both atomic_t Filled; ///< how many of the buffer is used }; /** ** Reset ring buffer pointers. ** ** @param rb Ring buffer to reset read/write pointers. */ void RingBufferReset(RingBuffer * rb) { rb->ReadPointer = rb->Buffer; rb->WritePointer = rb->Buffer; atomic_set(&rb->Filled, 0); } /** ** Allocate a new ring buffer. ** ** @param size Size of the ring buffer. 
** ** @returns Allocated ring buffer, must be freed with ** RingBufferDel(), NULL for out of memory. */ RingBuffer *RingBufferNew(size_t size) { RingBuffer *rb; if (!(rb = malloc(sizeof(*rb)))) { // allocate structure return rb; } if (!(rb->Buffer = malloc(size))) { // allocate buffer free(rb); return NULL; } rb->Size = size; rb->BufferEnd = rb->Buffer + size; RingBufferReset(rb); return rb; } /** ** Free an allocated ring buffer. */ void RingBufferDel(RingBuffer * rb) { free(rb->Buffer); free(rb); } /** ** Advance write pointer in ring buffer. ** ** @param rb Ring buffer to advance write pointer. ** @param cnt Number of bytes to be adavanced. ** ** @returns Number of bytes that could be advanced in ring buffer. */ size_t RingBufferWriteAdvance(RingBuffer * rb, size_t cnt) { size_t n; n = rb->Size - atomic_read(&rb->Filled); if (cnt > n) { // not enough space cnt = n; } // // Hitting end of buffer? // n = rb->BufferEnd - rb->WritePointer; if (n > cnt) { // don't cross the end rb->WritePointer += cnt; } else { // reached or cross the end rb->WritePointer = rb->Buffer; if (n < cnt) { n = cnt - n; rb->WritePointer += n; } } // // Only atomic modification! // atomic_add(cnt, &rb->Filled); return cnt; } /** ** Write to a ring buffer. ** ** @param rb Ring buffer to write to. ** @param buf Buffer of @p cnt bytes. ** @param cnt Number of bytes in buffer. ** ** @returns The number of bytes that could be placed in the ring ** buffer. */ size_t RingBufferWrite(RingBuffer * rb, const void *buf, size_t cnt) { size_t n; n = rb->Size - atomic_read(&rb->Filled); if (cnt > n) { // not enough space cnt = n; } // // Hitting end of buffer? 
// n = rb->BufferEnd - rb->WritePointer; if (n > cnt) { // don't cross the end memcpy(rb->WritePointer, buf, cnt); rb->WritePointer += cnt; } else { // reached or cross the end memcpy(rb->WritePointer, buf, n); rb->WritePointer = rb->Buffer; if (n < cnt) { buf += n; n = cnt - n; memcpy(rb->WritePointer, buf, n); rb->WritePointer += n; } } // // Only atomic modification! // atomic_add(cnt, &rb->Filled); return cnt; } /** ** Get write pointer and free bytes at this position of ring buffer. ** ** @param rb Ring buffer to write to. ** @param[out] wp Write pointer is placed here ** ** @returns The number of bytes that could be placed in the ring ** buffer at the write pointer. */ size_t RingBufferGetWritePointer(RingBuffer * rb, void **wp) { size_t n; size_t cnt; // Total free bytes available in ring buffer cnt = rb->Size - atomic_read(&rb->Filled); *wp = rb->WritePointer; // // Hitting end of buffer? // n = rb->BufferEnd - rb->WritePointer; if (n <= cnt) { // reached or cross the end return n; } return cnt; } /** ** Advance read pointer in ring buffer. ** ** @param rb Ring buffer to advance read pointer. ** @param cnt Number of bytes to be advanced. ** ** @returns Number of bytes that could be advanced in ring buffer. */ size_t RingBufferReadAdvance(RingBuffer * rb, size_t cnt) { size_t n; n = atomic_read(&rb->Filled); if (cnt > n) { // not enough filled cnt = n; } // // Hitting end of buffer? // n = rb->BufferEnd - rb->ReadPointer; if (n > cnt) { // don't cross the end rb->ReadPointer += cnt; } else { // reached or cross the end rb->ReadPointer = rb->Buffer; if (n < cnt) { n = cnt - n; rb->ReadPointer += n; } } // // Only atomic modification! // atomic_sub(cnt, &rb->Filled); return cnt; } /** ** Read from a ring buffer. ** ** @param rb Ring buffer to read from. ** @param buf Buffer of @p cnt bytes. ** @param cnt Number of bytes to be read. ** ** @returns Number of bytes that could be read from ring buffer. 
*/ size_t RingBufferRead(RingBuffer * rb, void *buf, size_t cnt) { size_t n; n = atomic_read(&rb->Filled); if (cnt > n) { // not enough filled cnt = n; } // // Hitting end of buffer? // n = rb->BufferEnd - rb->ReadPointer; if (n > cnt) { // don't cross the end memcpy(buf, rb->ReadPointer, cnt); rb->ReadPointer += cnt; } else { // reached or cross the end memcpy(buf, rb->ReadPointer, n); rb->ReadPointer = rb->Buffer; if (n < cnt) { buf += n; n = cnt - n; memcpy(buf, rb->ReadPointer, n); rb->ReadPointer += n; } } // // Only atomic modification! // atomic_sub(cnt, &rb->Filled); return cnt; } /** ** Get read pointer and used bytes at this position of ring buffer. ** ** @param rb Ring buffer to read from. ** @param[out] rp Read pointer is placed here ** ** @returns The number of bytes that could be read from the ring ** buffer at the read pointer. */ size_t RingBufferGetReadPointer(RingBuffer * rb, const void **rp) { size_t n; size_t cnt; // Total used bytes in ring buffer cnt = atomic_read(&rb->Filled); *rp = rb->ReadPointer; // // Hitting end of buffer? // n = rb->BufferEnd - rb->ReadPointer; if (n <= cnt) { // reached or cross the end return n; } return cnt; } /** ** Get free bytes in ring buffer. ** ** @param rb Ring buffer. ** ** @returns Number of bytes free in buffer. */ size_t RingBufferFreeBytes(RingBuffer * rb) { return rb->Size - atomic_read(&rb->Filled); } /** ** Get used bytes in ring buffer. ** ** @param rb Ring buffer. ** ** @returns Number of bytes used in buffer. */ size_t RingBufferUsedBytes(RingBuffer * rb) { return atomic_read(&rb->Filled); } vdr-plugin-softhddevice/Todo0000644000175000017500000001154212644034136015774 0ustar tobiastobias@file Todo @brief A software HD output device for VDR Copyright (c) 2011 - 2013 by Johns. All Rights Reserved. 
Contributor(s): License: AGPLv3 This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. $Id: $ missing: documentation of the PIP hotkeys. svdrp help page missing PIP hotkeys. svdrp stat: add X11 crashed status. more software deinterlace (yadif, ...) more software decoder with software deinterlace suspend output / energie saver: stop and restart X11 suspend plugin didn't restore full-screen (is this wanted?) Option deinterlace off / deinterlace force! ColorSpace aren't configurable with the gui. works for me: restart vdr not working, when started x11 was killed. video: subtitle not cleared subtitle could be asyncron grab image with hardware and better scaling support yaepghd changed position is lost on channel switch pause (live tv) has sometime problems with SAT1 HD Pro7 HD radio show black background radio no need to wait on video buffers starting with radio and own X11 server, shows no video some low-bandwidth tv channels have hiccups. check start with 24Hz display rate crash with ffmpeg without vaapi and vdpau. still-picture of PES recordings should use VideoMpegEnqueue. convert PIX_FMT_... PixelFormat to new names AV_PIX_FMT_..., AVPixelFormat. atmo service support 3D grab no warnings during still picture vdpau: software deinterlace path not working. OSD looses transparency, during channel switch. OSD looses transparency, while moving cut marks. ffmpeg >=1.2 supports same API like VA-API. 
libva: yaepghd (VaapiSetOutputPosition) support can associate only displayed part of osd grab image for va-api remove stderr output of libva init still many: (workaround export NO_MPEG_HW=1) [drm:i915_hangcheck_elapsed] *ERROR* Hangcheck timer elapsed... GPU hung [drm:i915_wait_request] *ERROR* i915_wait_request returns -11 ... missing OSD support for 3d SBS / Top-Bottom streams, like VPDAU. PIP support / multistream handling VA-AP VaapiCleanup crash after channel without video. libva: branch vaapi-ext / staging add support for vaapi-ext / staging libva-intel-driver: deinterlace only supported with vaapi-ext 1080i does no v-sync (sometimes correct working with vaapi-ext) OSD has sometimes wrong size (workaround written) sometimes software decoder deinterlace isn't working and 1080i channels show artefacts libva-vdpau-driver: G210/GT520 OSD update too slow (needs hardware problem workaround) hangup on exit (VaapiDelDecoder -> VaapiCleanup -> vaDestroyContext -> pthread_rwlock_wrlock) OSD still has some problems with auto-crop and 4:3 zoom. libva-xvba-driver: x11: skip multiple configure-notify, handle only the last one. support embedded mode audio: Make alsa thread/polled and oss thread/polled output module runtime selectable. Mute should do a real mute and not only set volume to zero. Starting suspended and muted, didn't register the mute. Relaxed audio sync checks at end of packet and already in sync samplerate problem resume/suspend. only wait for video start, if video is running. Not primary device, don't use and block audio/video. multiple open of audio device, reduce them. Not all channel conversions are written (f.e. 2->3 ... 5->6 ...) audio/alsa: remix support of unsupported sample rates audio/oss: alsa oss emulation mixer "pcm" not working oss4 mixer channel not working ring buffer overflow with alsa oss emulation HDMI/SPDIF Passthrough: only AC-3 written playback of recording pause is not reset, when replay exit (fixed?) replay/pause need 100% cpu (fixed?) 
plugins: mp3 plugin needs 100% cpu (bad ::Poll) setup: Setup of decoder type. Setup of output type. Some setup parameters are not used until restart. Can a notice be added to the setup menu? unsorted: stoping vdr while plugin is suspended opens and closes a window. svdrp prim: support plugin names for device numbers. Workaround exists: hangup PipVideoStream -> Vdpau_get_format -> xcb -> poll + lock DecoderLockMutex check compiletime and runtime ffmpeg/libav version during init. future features (not planed for 1.0 - 1.5) video out with xv video out with opengl software decoder for xv / opengl save and use auto-crop with channel zapping upmix stereo to AC-3 (supported by alsa plugin) vdr-plugin-softhddevice/video.c0000644000175000017500000120022312644034136016413 0ustar tobiastobias/// /// @file video.c @brief Video module /// /// Copyright (c) 2009 - 2015 by Johns. All Rights Reserved. /// /// Contributor(s): /// /// License: AGPLv3 /// /// This program is free software: you can redistribute it and/or modify /// it under the terms of the GNU Affero General Public License as /// published by the Free Software Foundation, either version 3 of the /// License. /// /// This program is distributed in the hope that it will be useful, /// but WITHOUT ANY WARRANTY; without even the implied warranty of /// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the /// GNU Affero General Public License for more details. /// /// $Id: d9534b488d56e3efa4d92a0af07e715a05766e2e $ ////////////////////////////////////////////////////////////////////////////// /// /// @defgroup Video The video module. /// /// This module contains all video rendering functions. /// /// @todo disable screen saver support /// /// Uses Xlib where it is needed for VA-API or vdpau. XCB is used for /// everything else. /// /// - X11 /// - OpenGL rendering /// - OpenGL rendering with GLX texture-from-pixmap /// - Xrender rendering /// /// @todo FIXME: use vaErrorStr for all VA-API errors. 
/// #define USE_XLIB_XCB ///< use xlib/xcb backend #define noUSE_SCREENSAVER ///< support disable screensaver #define USE_AUTOCROP ///< compile auto-crop support #define USE_GRAB ///< experimental grab code #define noUSE_GLX ///< outdated GLX code #define USE_DOUBLEBUFFER ///< use GLX double buffers //#define USE_VAAPI ///< enable vaapi support //#define USE_VDPAU ///< enable vdpau support //#define USE_BITMAP ///< use vdpau bitmap surface //#define AV_INFO ///< log a/v sync informations #ifndef AV_INFO_TIME #define AV_INFO_TIME (50 * 60) ///< a/v info every minute #endif #define USE_VIDEO_THREAD ///< run decoder in an own thread //#define USE_VIDEO_THREAD2 ///< run decoder+display in own threads #include #include #include #include #include #include #include #include #include #include #define _(str) gettext(str) ///< gettext shortcut #define _N(str) str ///< gettext_noop shortcut #ifdef USE_VIDEO_THREAD #ifndef __USE_GNU #define __USE_GNU #endif #include #include #include #ifndef HAVE_PTHREAD_NAME /// only available with newer glibc #define pthread_setname_np(thread, name) #endif #endif #ifdef USE_XLIB_XCB #include #include #include #include #include //#include #ifdef xcb_USE_GLX #include #endif //#include #ifdef USE_SCREENSAVER #include #include #endif //#include //#include //#include //#include //#include #include #ifdef XCB_ICCCM_NUM_WM_SIZE_HINTS_ELEMENTS #include #else // compatibility hack for old xcb-util /** * @brief Action on the _NET_WM_STATE property */ typedef enum { /* Remove/unset property */ XCB_EWMH_WM_STATE_REMOVE = 0, /* Add/set property */ XCB_EWMH_WM_STATE_ADD = 1, /* Toggle property */ XCB_EWMH_WM_STATE_TOGGLE = 2 } xcb_ewmh_wm_state_action_t; #endif #endif #ifdef USE_GLX #include // For GL_COLOR_BUFFER_BIT #include // only for gluErrorString #include #endif #ifdef USE_VAAPI #include #if VA_CHECK_VERSION(0,33,99) #include #endif #ifdef USE_GLX #include #endif #ifndef VA_SURFACE_ATTRIB_SETTABLE /// make source compatible with stable libva 
#define vaCreateSurfaces(d, f, w, h, s, ns, a, na) \ vaCreateSurfaces(d, w, h, f, ns, s) #endif #endif #ifdef USE_VDPAU #include #include #endif #include // support old ffmpeg versions <1.0 #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(55,18,102) #define AVCodecID CodecID #define AV_CODEC_ID_H263 CODEC_ID_H263 #define AV_CODEC_ID_H264 CODEC_ID_H264 #define AV_CODEC_ID_MPEG1VIDEO CODEC_ID_MPEG1VIDEO #define AV_CODEC_ID_MPEG2VIDEO CODEC_ID_MPEG2VIDEO #define AV_CODEC_ID_MPEG4 CODEC_ID_MPEG4 #define AV_CODEC_ID_VC1 CODEC_ID_VC1 #define AV_CODEC_ID_WMV3 CODEC_ID_WMV3 #endif #include #include #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(54,86,100) /// /// ffmpeg version 1.1.1 calls get_format with zero width and height /// for H264 codecs. /// since version 1.1.3 get_format is called twice. /// ffmpeg 1.2 still buggy /// #define FFMPEG_BUG1_WORKAROUND ///< get_format bug workaround #endif #include "iatomic.h" // portable atomic_t #include "misc.h" #include "video.h" #include "audio.h" #ifdef USE_XLIB_XCB //---------------------------------------------------------------------------- // Declarations //---------------------------------------------------------------------------- /// /// Video resolutions selector. /// typedef enum _video_resolutions_ { VideoResolution576i, ///< ...x576 interlaced VideoResolution720p, ///< ...x720 progressive VideoResolutionFake1080i, ///< 1280x1080 1440x1080 interlaced VideoResolution1080i, ///< 1920x1080 interlaced VideoResolutionMax ///< number of resolution indexs } VideoResolutions; /// /// Video deinterlace modes. /// typedef enum _video_deinterlace_modes_ { VideoDeinterlaceBob, ///< bob deinterlace VideoDeinterlaceWeave, ///< weave deinterlace VideoDeinterlaceTemporal, ///< temporal deinterlace VideoDeinterlaceTemporalSpatial, ///< temporal spatial deinterlace VideoDeinterlaceSoftBob, ///< software bob deinterlace VideoDeinterlaceSoftSpatial, ///< software spatial deinterlace } VideoDeinterlaceModes; /// /// Video scaleing modes. 
/// typedef enum _video_scaling_modes_ { VideoScalingNormal, ///< normal scaling VideoScalingFast, ///< fastest scaling VideoScalingHQ, ///< high quality scaling VideoScalingAnamorphic, ///< anamorphic scaling } VideoScalingModes; /// /// Video zoom modes. /// typedef enum _video_zoom_modes_ { VideoNormal, ///< normal VideoStretch, ///< stretch to all edges VideoCenterCutOut, ///< center and cut out VideoAnamorphic, ///< anamorphic scaled (unsupported) } VideoZoomModes; /// /// Video color space conversions. /// typedef enum _video_color_space_ { VideoColorSpaceNone, ///< no conversion VideoColorSpaceBt601, ///< ITU.BT-601 Y'CbCr VideoColorSpaceBt709, ///< ITU.BT-709 HDTV Y'CbCr VideoColorSpaceSmpte240 ///< SMPTE-240M Y'PbPr } VideoColorSpace; /// /// Video output module structure and typedef. /// typedef struct _video_module_ { const char *Name; ///< video output module name char Enabled; ///< flag output module enabled /// allocate new video hw decoder VideoHwDecoder *(*const NewHwDecoder)(VideoStream *); void (*const DelHwDecoder) (VideoHwDecoder *); unsigned (*const GetSurface) (VideoHwDecoder *, const AVCodecContext *); void (*const ReleaseSurface) (VideoHwDecoder *, unsigned); enum PixelFormat (*const get_format) (VideoHwDecoder *, AVCodecContext *, const enum PixelFormat *); void (*const RenderFrame) (VideoHwDecoder *, const AVCodecContext *, const AVFrame *); void *(*const GetHwAccelContext)(VideoHwDecoder *); void (*const SetClock) (VideoHwDecoder *, int64_t); int64_t(*const GetClock) (const VideoHwDecoder *); void (*const SetClosing) (const VideoHwDecoder *); void (*const ResetStart) (const VideoHwDecoder *); void (*const SetTrickSpeed) (const VideoHwDecoder *, int); uint8_t *(*const GrabOutput)(int *, int *, int *); void (*const GetStats) (VideoHwDecoder *, int *, int *, int *, int *); void (*const SetBackground) (uint32_t); void (*const SetVideoMode) (void); void (*const ResetAutoCrop) (void); /// module display handler thread void (*const 
DisplayHandlerThread) (void); void (*const OsdClear) (void); ///< clear OSD /// draw OSD ARGB area void (*const OsdDrawARGB) (int, int, int, int, int, const uint8_t *, int, int); void (*const OsdInit) (int, int); ///< initialize OSD void (*const OsdExit) (void); ///< cleanup OSD int (*const Init) (const char *); ///< initialize video output module void (*const Exit) (void); ///< cleanup video output module } VideoModule; //---------------------------------------------------------------------------- // Defines //---------------------------------------------------------------------------- #define CODEC_SURFACES_MAX 31 ///< maximal of surfaces #define CODEC_SURFACES_DEFAULT 21 ///< default of surfaces // FIXME: video-xvba only supports 14 #define xCODEC_SURFACES_DEFAULT 14 ///< default of surfaces #define CODEC_SURFACES_MPEG2 3 ///< 1 decode, up to 2 references #define CODEC_SURFACES_MPEG4 3 ///< 1 decode, up to 2 references #define CODEC_SURFACES_H264 21 ///< 1 decode, up to 20 references #define CODEC_SURFACES_VC1 3 ///< 1 decode, up to 2 references #define VIDEO_SURFACES_MAX 4 ///< video output surfaces for queue #define OUTPUT_SURFACES_MAX 4 ///< output surfaces for flip page //---------------------------------------------------------------------------- // Variables //---------------------------------------------------------------------------- char VideoIgnoreRepeatPict; ///< disable repeat pict warning static const char *VideoDriverName; ///< video output device static Display *XlibDisplay; ///< Xlib X11 display static xcb_connection_t *Connection; ///< xcb connection static xcb_colormap_t VideoColormap; ///< video colormap static xcb_window_t VideoWindow; ///< video window static xcb_screen_t const *VideoScreen; ///< video screen static uint32_t VideoBlankTick; ///< blank cursor timer static xcb_pixmap_t VideoCursorPixmap; ///< blank curosr pixmap static xcb_cursor_t VideoBlankCursor; ///< empty invisible cursor static int VideoWindowX; ///< video output window 
// (cont.) x coordinate
static int VideoWindowY;                ///< video output window y coordinate
static unsigned VideoWindowWidth;       ///< video output window width
static unsigned VideoWindowHeight;      ///< video output window height

static const VideoModule NoopModule;    ///< forward definition of noop module

    /// selected video module
static const VideoModule *VideoUsedModule = &NoopModule;

signed char VideoHardwareDecoder = -1;  ///< flag use hardware decoder

static char VideoSurfaceModesChanged;   ///< flag surface modes changed

    /// flag use transparent OSD.
static const char VideoTransparentOsd = 1;

static uint32_t VideoBackground;        ///< video background color
static char VideoStudioLevels;          ///< flag use studio levels

    /// Default deinterlace mode.
static VideoDeinterlaceModes VideoDeinterlace[VideoResolutionMax];

    /// Default number of deinterlace surfaces
static const int VideoDeinterlaceSurfaces = 4;

    /// Default skip chroma deinterlace flag (VDPAU only).
static char VideoSkipChromaDeinterlace[VideoResolutionMax];

    /// Default inverse telecine flag (VDPAU only).
static char VideoInverseTelecine[VideoResolutionMax];

    /// Default amount of noise reduction algorithm to apply (0 .. 1000).
static int VideoDenoise[VideoResolutionMax];

    /// Default amount of sharpening, or blurring, to apply (-1000 .. 1000).
static int VideoSharpen[VideoResolutionMax];

    /// Default cut top and bottom in pixels
static int VideoCutTopBottom[VideoResolutionMax];

    /// Default cut left and right in pixels
static int VideoCutLeftRight[VideoResolutionMax];

    /// Color space ITU-R BT.601, ITU-R BT.709, ...
    /// one entry per resolution group: 576i uses BT.601, HD groups BT.709
static const VideoColorSpace VideoColorSpaces[VideoResolutionMax] = {
    VideoColorSpaceBt601, VideoColorSpaceBt709, VideoColorSpaceBt709,
    VideoColorSpaceBt709
};

    /// Default scaling mode
static VideoScalingModes VideoScaling[VideoResolutionMax];

    /// Default audio/video delay
int VideoAudioDelay;

    /// Default zoom mode for 4:3
static VideoZoomModes Video4to3ZoomMode;

    /// Default zoom mode for 16:9 and others
static VideoZoomModes VideoOtherZoomMode;

static char Video60HzMode;              ///< handle 60hz displays
static char VideoSoftStartSync;         ///< soft start sync audio/video
static const int VideoSoftStartFrames = 100;    ///< soft start frames
static char VideoShowBlackPicture;      ///< flag show black picture

static xcb_atom_t WmDeleteWindowAtom;   ///< WM delete message atom
static xcb_atom_t NetWmState;           ///< wm-state message atom
static xcb_atom_t NetWmStateFullscreen; ///< fullscreen wm-state message atom

#ifdef DEBUG
extern uint32_t VideoSwitch;            ///< ticks for channel switch
#endif
extern void AudioVideoReady(int64_t);   ///< tell audio video is ready

#ifdef USE_VIDEO_THREAD

static pthread_t VideoThread;           ///< video decode thread
static pthread_cond_t VideoWakeupCond;  ///< wakeup condition variable
static pthread_mutex_t VideoMutex;      ///< video condition mutex
static pthread_mutex_t VideoLockMutex;  ///< video lock mutex

#endif

#ifdef USE_VIDEO_THREAD2

static pthread_t VideoDisplayThread;    ///< video decode thread
static pthread_cond_t VideoWakeupCond;  ///< wakeup condition variable
static pthread_mutex_t VideoDisplayMutex;   ///< video condition mutex
static pthread_mutex_t VideoDisplayLockMutex;   ///< video lock mutex

#endif

static int OsdConfigWidth;              ///< osd configured width
static int OsdConfigHeight;             ///< osd configured height
static char OsdShown;                   ///< flag show osd
static char Osd3DMode;                  ///< 3D OSD mode
static int OsdWidth;                    ///< osd width
static int OsdHeight;                   ///< osd height
static int OsdDirtyX;                   ///< osd dirty area x
static int OsdDirtyY;                   ///< osd dirty area y
static int
OsdDirtyWidth;                          ///< osd dirty area width
static int OsdDirtyHeight;              ///< osd dirty area height

static int64_t VideoDeltaPTS;           ///< FIXME: fix pts

#ifdef USE_SCREENSAVER
static char DPMSDisabled;               ///< flag we have disabled dpms
static char EnableDPMSatBlackScreen;    ///< flag we should enable dpms at black screen
#endif

//----------------------------------------------------------------------------
//  Common Functions
//----------------------------------------------------------------------------

static void VideoThreadLock(void);      ///< lock video thread
static void VideoThreadUnlock(void);    ///< unlock video thread
static void VideoThreadExit(void);      ///< exit/kill video thread

#ifdef USE_SCREENSAVER
static void X11SuspendScreenSaver(xcb_connection_t *, int);
static int X11HaveDPMS(xcb_connection_t *);
static void X11DPMSReenable(xcb_connection_t *);
static void X11DPMSDisable(xcb_connection_t *);
#endif

///
/// Update video pts.
///
/// @param pts_p    pointer to pts
/// @param interlaced   interlaced flag (frame isn't right)
/// @param video_ctx    ffmpeg video codec context (for frame duration)
/// @param frame    frame to display
///
/// @note frame->interlaced_frame can't be used for interlace detection
///
static void VideoSetPts(int64_t * pts_p, int interlaced,
    const AVCodecContext * video_ctx, const AVFrame * frame)
{
    int64_t pts;
    int duration;

    //
    //  Get duration for this frame.
    //  FIXME: using framerate as workaround for av_frame_get_pkt_duration
    //
#if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(56,13,100)
    // version for older ffmpeg without framerate
    if (video_ctx->time_base.num && video_ctx->time_base.den) {
	// duration in ms derived from the codec time base
	duration = (video_ctx->ticks_per_frame * 1000 * video_ctx->time_base.num) / video_ctx->time_base.den;
    } else {
	duration = interlaced ? 40 : 20;	// 50Hz -> 20ms default
    }
    Debug(4, "video: %d/%d %" PRIx64 " -> %d\n", video_ctx->time_base.den,
	video_ctx->time_base.num, av_frame_get_pkt_duration(frame), duration);
#else
    if (video_ctx->framerate.num && video_ctx->framerate.den) {
	// duration in ms derived from the stream frame rate
	duration = 1000 * video_ctx->framerate.den / video_ctx->framerate.num;
    } else {
	duration = interlaced ? 40 : 20;	// 50Hz -> 20ms default
    }
    Debug(4, "video: %d/%d %" PRIx64 " -> %d\n", video_ctx->framerate.den,
	video_ctx->framerate.num, av_frame_get_pkt_duration(frame), duration);
#endif

    // update video clock: advance by the frame duration in 90kHz PTS ticks
    if (*pts_p != (int64_t) AV_NOPTS_VALUE) {
	*pts_p += duration * 90;
	//Info("video: %s +pts\n", Timestamp2String(*pts_p));
    }
    //av_opt_ptr(avcodec_get_frame_class(), frame, "best_effort_timestamp");
    //pts = frame->best_effort_timestamp;
    pts = frame->pkt_pts;
    if (pts == (int64_t) AV_NOPTS_VALUE || !pts) {
	// libav: 0.8pre didn't set pts
	pts = frame->pkt_dts;
    }
    // libav: sets only pkt_dts which can be 0
    if (pts && pts != (int64_t) AV_NOPTS_VALUE) {
	// build a monotonic pts
	if (*pts_p != (int64_t) AV_NOPTS_VALUE) {
	    int64_t delta;

	    delta = pts - *pts_p;
	    // ignore negative jumps (small backward steps keep the old clock,
	    // only the largest backward delta is remembered in VideoDeltaPTS)
	    if (delta > -600 * 90 && delta <= -40 * 90) {
		if (-delta > VideoDeltaPTS) {
		    VideoDeltaPTS = -delta;
		    Debug(4, "video: %#012" PRIx64 "->%#012" PRIx64 " delta%+4" PRId64 " pts\n", *pts_p, pts, pts - *pts_p);
		}
		return;
	    }
	} else {			// first new clock value
	    AudioVideoReady(pts);
	}
	if (*pts_p != pts) {
	    Debug(4, "video: %#012" PRIx64 "->%#012" PRIx64 " delta=%4" PRId64 " pts\n", *pts_p, pts, pts - *pts_p);
	    *pts_p = pts;
	}
    }
}

///
/// Update output for new size or aspect ratio.
///
/// @param input_aspect_ratio  video stream aspect
/// @param input_width     video stream width in pixel
/// @param input_height    video stream height in pixel
/// @param resolution      resolution group of the stream
/// @param video_x     video window x coordinate
/// @param video_y     video window y coordinate
/// @param video_width     video window width
/// @param video_height    video window height
/// @param[out] output_x   calculated output x coordinate
/// @param[out] output_y   calculated output y coordinate
/// @param[out] output_width   calculated output width
/// @param[out] output_height  calculated output height
/// @param[out] crop_x     calculated source crop x
/// @param[out] crop_y     calculated source crop y
/// @param[out] crop_width     calculated source crop width
/// @param[out] crop_height    calculated source crop height
///
static void VideoUpdateOutput(AVRational input_aspect_ratio, int input_width,
    int input_height, VideoResolutions resolution, int video_x, int video_y,
    int video_width, int video_height, int *output_x, int *output_y,
    int *output_width, int *output_height, int *crop_x, int *crop_y,
    int *crop_width, int *crop_height)
{
    AVRational display_aspect_ratio;
    AVRational tmp_ratio;

    if (!input_aspect_ratio.num || !input_aspect_ratio.den) {
	input_aspect_ratio.num = 1;
	input_aspect_ratio.den = 1;
	Debug(3, "video: aspect defaults to %d:%d\n", input_aspect_ratio.num,
	    input_aspect_ratio.den);
    }
    // convert sample aspect to display aspect of the input picture
    av_reduce(&input_aspect_ratio.num, &input_aspect_ratio.den,
	input_width * input_aspect_ratio.num,
	input_height * input_aspect_ratio.den, 1024 * 1024);

    // InputWidth/Height can be zero = uninitialized
    if (!input_aspect_ratio.num || !input_aspect_ratio.den) {
	input_aspect_ratio.num = 1;
	input_aspect_ratio.den = 1;
    }
    // correct for physical (non-square) screen pixels
    display_aspect_ratio.num =
	VideoScreen->width_in_pixels * VideoScreen->height_in_millimeters;
    display_aspect_ratio.den =
	VideoScreen->height_in_pixels * VideoScreen->width_in_millimeters;

    display_aspect_ratio = av_mul_q(input_aspect_ratio, display_aspect_ratio);
    Debug(3, "video: aspect %d:%d\n", display_aspect_ratio.num,
	display_aspect_ratio.den);

    // default source crop from the configured cut values
    *crop_x = VideoCutLeftRight[resolution];
    *crop_y = VideoCutTopBottom[resolution];
    *crop_width = input_width - VideoCutLeftRight[resolution] * 2;
    *crop_height = input_height - VideoCutTopBottom[resolution] * 2;

    // FIXME: store different positions for the ratios
    tmp_ratio.num = 4;
    tmp_ratio.den = 3;
#ifdef DEBUG
    fprintf(stderr, "ratio: %d:%d %d:%d\n", input_aspect_ratio.num,
	input_aspect_ratio.den, display_aspect_ratio.num,
	display_aspect_ratio.den);
#endif
    // 4:3 streams use their own zoom mode, everything else the "other" mode
    // NOTE(review): no default case -- assumes the enums cover all modes
    if (!av_cmp_q(input_aspect_ratio, tmp_ratio)) {
	switch (Video4to3ZoomMode) {
	    case VideoNormal:
		goto normal;
	    case VideoStretch:
		goto stretch;
	    case VideoCenterCutOut:
		goto center_cut_out;
	    case VideoAnamorphic:
		// FIXME: rest should be done by hardware
		goto stretch;
	}
    }
    switch (VideoOtherZoomMode) {
	case VideoNormal:
	    goto normal;
	case VideoStretch:
	    goto stretch;
	case VideoCenterCutOut:
	    goto center_cut_out;
	case VideoAnamorphic:
	    // FIXME: rest should be done by hardware
	    goto stretch;
    }

  normal:
    // letter-/pillar-box: fit to window keeping the display aspect
    *output_x = video_x;
    *output_y = video_y;
    *output_width =
	(video_height * display_aspect_ratio.num +
	display_aspect_ratio.den - 1) / display_aspect_ratio.den;
    *output_height =
	(video_width * display_aspect_ratio.den +
	display_aspect_ratio.num - 1) / display_aspect_ratio.num;
    if (*output_width > video_width) {
	*output_width = video_width;
	*output_y += (video_height - *output_height) / 2;
    } else if (*output_height > video_height) {
	*output_height = video_height;
	*output_x += (video_width - *output_width) / 2;
    }
    Debug(3, "video: aspect output %dx%d%+d%+d\n", *output_width,
	*output_height, *output_x, *output_y);
    return;

  stretch:
    // ignore aspect: fill the whole window
    *output_x = video_x;
    *output_y = video_y;
    *output_width = video_width;
    *output_height = video_height;
    Debug(3, "video: stretch output %dx%d%+d%+d\n", *output_width,
	*output_height, *output_x, *output_y);
    return;

  center_cut_out:
    // fill the window, cut away source borders instead of scaling down
    *output_x = video_x;
    *output_y = video_y;
    *output_height = video_height;
    *output_width = video_width;

    *crop_width =
	(video_height * display_aspect_ratio.num +
	display_aspect_ratio.den - 1) / display_aspect_ratio.den;
    *crop_height =
	(video_width * display_aspect_ratio.den +
	display_aspect_ratio.num - 1) / display_aspect_ratio.num;

    // look which side must be cut
    if (*crop_width > video_width) {
	int tmp;

	*crop_height = input_height - VideoCutTopBottom[resolution] * 2;

	// adjust scaling
	tmp = ((*crop_width - video_width) * input_width) / (2 * video_width);
	// FIXME: round failure?
	if (tmp > *crop_x) {
	    *crop_x = tmp;
	}
	*crop_width = input_width - *crop_x * 2;
    } else if (*crop_height > video_height) {
	int tmp;

	*crop_width = input_width - VideoCutLeftRight[resolution] * 2;

	// adjust scaling
	tmp = ((*crop_height - video_height) * input_height)
	    / (2 * video_height);
	// FIXME: round failure?
	if (tmp > *crop_y) {
	    *crop_y = tmp;
	}
	*crop_height = input_height - *crop_y * 2;
    } else {
	*crop_width = input_width - VideoCutLeftRight[resolution] * 2;
	*crop_height = input_height - VideoCutTopBottom[resolution] * 2;
    }
    Debug(3, "video: aspect crop %dx%d%+d%+d\n", *crop_width, *crop_height,
	*crop_x, *crop_y);
    return;
}

//----------------------------------------------------------------------------
//  GLX
//----------------------------------------------------------------------------

#ifdef USE_GLX

static int GlxEnabled;                  ///< use GLX
static int GlxVSyncEnabled;             ///< enable/disable v-sync
static GLXContext GlxSharedContext;     ///< shared gl context
static GLXContext GlxContext;           ///< our gl context

#ifdef USE_VIDEO_THREAD
static GLXContext GlxThreadContext;     ///< our gl context for the thread
#endif

static XVisualInfo *GlxVisualInfo;      ///< our gl visual

static GLuint OsdGlTextures[2];         ///< gl texture for OSD
static int OsdIndex;                    ///< index into OsdGlTextures

///
/// GLX extension functions
///@{
#ifdef GLX_MESA_swap_control
static PFNGLXSWAPINTERVALMESAPROC GlxSwapIntervalMESA;
#endif
#ifdef GLX_SGI_video_sync
static PFNGLXGETVIDEOSYNCSGIPROC GlxGetVideoSyncSGI;
#endif
#ifdef GLX_SGI_swap_control
static PFNGLXSWAPINTERVALSGIPROC GlxSwapIntervalSGI;
#endif
///@}

///
/// GLX check error.
///
static void GlxCheck(void)
{
    GLenum err;

    if ((err = glGetError()) != GL_NO_ERROR) {
	Debug(3, "video/glx: error %d '%s'\n", err, gluErrorString(err));
    }
}

///
/// GLX check if a GLX extension is supported.
/// /// @param ext extension to query /// @returns true if supported, false otherwise /// static int GlxIsExtensionSupported(const char *ext) { const char *extensions; if ((extensions = glXQueryExtensionsString(XlibDisplay, DefaultScreen(XlibDisplay)))) { const char *s; int l; s = strstr(extensions, ext); l = strlen(ext); return s && (s[l] == ' ' || s[l] == '\0'); } return 0; } /// /// Setup GLX decoder /// /// @param width input video textures width /// @param height input video textures height /// @param[OUT] textures created and prepared textures /// static void GlxSetupDecoder(int width, int height, GLuint * textures) { int i; glEnable(GL_TEXTURE_2D); // create 2d texture glGenTextures(2, textures); GlxCheck(); for (i = 0; i < 2; ++i) { glBindTexture(GL_TEXTURE_2D, textures[i]); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); glPixelStorei(GL_UNPACK_ALIGNMENT, 4); glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, width, height, 0, GL_BGRA, GL_UNSIGNED_BYTE, NULL); glBindTexture(GL_TEXTURE_2D, 0); } glDisable(GL_TEXTURE_2D); GlxCheck(); } /// /// Render texture. /// /// @param texture 2d texture /// @param x window x /// @param y window y /// @param width window width /// @param height window height /// static inline void GlxRenderTexture(GLuint texture, int x, int y, int width, int height) { glEnable(GL_TEXTURE_2D); glBindTexture(GL_TEXTURE_2D, texture); glColor4f(1.0f, 1.0f, 1.0f, 1.0f); // no color glBegin(GL_QUADS); { glTexCoord2f(1.0f, 1.0f); glVertex2i(x + width, y + height); glTexCoord2f(0.0f, 1.0f); glVertex2i(x, y + height); glTexCoord2f(0.0f, 0.0f); glVertex2i(x, y); glTexCoord2f(1.0f, 0.0f); glVertex2i(x + width, y); } glEnd(); glBindTexture(GL_TEXTURE_2D, 0); glDisable(GL_TEXTURE_2D); } /// /// Upload OSD texture. 
/// /// @param x x coordinate texture /// @param y y coordinate texture /// @param width argb image width /// @param height argb image height /// @param argb argb image /// static void GlxUploadOsdTexture(int x, int y, int width, int height, const uint8_t * argb) { // FIXME: use other / faster uploads // ARB_pixelbuffer_object GL_PIXEL_UNPACK_BUFFER glBindBufferARB() // glMapBuffer() glUnmapBuffer() glEnable(GL_TEXTURE_2D); // upload 2d texture glBindTexture(GL_TEXTURE_2D, OsdGlTextures[OsdIndex]); glTexSubImage2D(GL_TEXTURE_2D, 0, x, y, width, height, GL_BGRA, GL_UNSIGNED_BYTE, argb); glBindTexture(GL_TEXTURE_2D, 0); glDisable(GL_TEXTURE_2D); } /// /// GLX initialize OSD. /// /// @param width osd width /// @param height osd height /// static void GlxOsdInit(int width, int height) { int i; #ifdef DEBUG if (!GlxEnabled) { Debug(3, "video/glx: %s called without glx enabled\n", __FUNCTION__); return; } #endif Debug(3, "video/glx: osd init context %p <-> %p\n", glXGetCurrentContext(), GlxContext); // // create a RGBA texture. // glEnable(GL_TEXTURE_2D); // create 2d texture(s) glGenTextures(2, OsdGlTextures); for (i = 0; i < 2; ++i) { glBindTexture(GL_TEXTURE_2D, OsdGlTextures[i]); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); glPixelStorei(GL_UNPACK_ALIGNMENT, 4); glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, width, height, 0, GL_BGRA, GL_UNSIGNED_BYTE, NULL); } glBindTexture(GL_TEXTURE_2D, 0); glDisable(GL_TEXTURE_2D); } /// /// GLX cleanup osd. /// static void GlxOsdExit(void) { if (OsdGlTextures[0]) { glDeleteTextures(2, OsdGlTextures); OsdGlTextures[0] = 0; OsdGlTextures[1] = 0; } } /// /// Upload ARGB image to texture. 
/// /// @param xi x-coordinate in argb image /// @param yi y-coordinate in argb image /// @paran height height in pixel in argb image /// @paran width width in pixel in argb image /// @param pitch pitch of argb image /// @param argb 32bit ARGB image data /// @param x x-coordinate on screen of argb image /// @param y y-coordinate on screen of argb image /// /// @note looked by caller /// static void GlxOsdDrawARGB(int xi, int yi, int width, int height, int pitch, const uint8_t * argb, int x, int y) { uint8_t *tmp; #ifdef DEBUG uint32_t start; uint32_t end; #endif #ifdef DEBUG if (!GlxEnabled) { Debug(3, "video/glx: %s called without glx enabled\n", __FUNCTION__); return; } start = GetMsTicks(); Debug(3, "video/glx: osd context %p <-> %p\n", glXGetCurrentContext(), GlxContext); #endif // set glx context if (!glXMakeCurrent(XlibDisplay, VideoWindow, GlxContext)) { Error(_("video/glx: can't make glx context current\n")); return; } // FIXME: faster way tmp = malloc(width * height * 4); if (tmp) { int i; for (i = 0; i < height; ++i) { memcpy(tmp + i * width * 4, argb + xi * 4 + (i + yi) * pitch, width * 4); } GlxUploadOsdTexture(x, y, width, height, tmp); glXMakeCurrent(XlibDisplay, None, NULL); free(tmp); } #ifdef DEBUG end = GetMsTicks(); Debug(3, "video/glx: osd upload %dx%d%+d%+d %dms %d\n", width, height, x, y, end - start, width * height * 4); #endif } /// /// Clear OSD texture. /// /// @note looked by caller /// static void GlxOsdClear(void) { void *texbuf; #ifdef DEBUG if (!GlxEnabled) { Debug(3, "video/glx: %s called without glx enabled\n", __FUNCTION__); return; } Debug(3, "video/glx: osd context %p <-> %p\n", glXGetCurrentContext(), GlxContext); #endif // FIXME: any opengl function to clear an area? 
// FIXME: if not; use zero buffer // FIXME: if not; use dirty area // set glx context if (!glXMakeCurrent(XlibDisplay, VideoWindow, GlxContext)) { Error(_("video/glx: can't make glx context current\n")); return; } texbuf = calloc(OsdWidth * OsdHeight, 4); GlxUploadOsdTexture(0, 0, OsdWidth, OsdHeight, texbuf); glXMakeCurrent(XlibDisplay, None, NULL); free(texbuf); } /// /// Setup GLX window. /// /// @param window xcb window id /// @param width window width /// @param height window height /// @param context GLX context /// static void GlxSetupWindow(xcb_window_t window, int width, int height, GLXContext context) { #ifdef DEBUG uint32_t start; uint32_t end; int i; unsigned count; #endif Debug(3, "video/glx: %s %x %dx%d context:%p", __FUNCTION__, window, width, height, context); // set glx context if (!glXMakeCurrent(XlibDisplay, window, context)) { Error(_("video/glx: can't make glx context current\n")); GlxEnabled = 0; return; } Debug(3, "video/glx: ok\n"); #ifdef DEBUG // check if v-sync is working correct end = GetMsTicks(); for (i = 0; i < 10; ++i) { start = end; glClear(GL_COLOR_BUFFER_BIT); glXSwapBuffers(XlibDisplay, window); end = GetMsTicks(); GlxGetVideoSyncSGI(&count); Debug(3, "video/glx: %5d frame rate %dms\n", count, end - start); // nvidia can queue 5 swaps if (i > 5 && (end - start) < 15) { Warning(_("video/glx: no v-sync\n")); } } #endif // viewpoint GlxCheck(); glViewport(0, 0, width, height); glDepthRange(-1.0, 1.0); glClearColor(0.0f, 0.0f, 0.0f, 0.0f); glColor3f(1.0f, 1.0f, 1.0f); glClearDepth(1.0); GlxCheck(); glMatrixMode(GL_PROJECTION); glLoadIdentity(); glOrtho(0.0, width, height, 0.0, -1.0, 1.0); GlxCheck(); glMatrixMode(GL_MODELVIEW); glLoadIdentity(); glDisable(GL_DEPTH_TEST); // setup 2d drawing glDepthMask(GL_FALSE); glDisable(GL_CULL_FACE); #ifdef USE_DOUBLEBUFFER glDrawBuffer(GL_BACK); #else glDrawBuffer(GL_FRONT); #endif glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE); glEnable(GL_BLEND); glBlendFunc(GL_SRC_ALPHA, 
GL_ONE_MINUS_SRC_ALPHA); #ifdef DEBUG #ifdef USE_DOUBLEBUFFER glDrawBuffer(GL_FRONT); glClearColor(1.0f, 0.0f, 1.0f, 1.0f); glClear(GL_COLOR_BUFFER_BIT); glDrawBuffer(GL_BACK); #endif #endif // clear glClearColor(0.0f, 0.0f, 0.0f, 1.0f); // intial background color glClear(GL_COLOR_BUFFER_BIT); #ifdef DEBUG glClearColor(1.0f, 1.0f, 0.0f, 1.0f); // background color #endif GlxCheck(); } /// /// Initialize GLX. /// static void GlxInit(void) { static GLint visual_attr[] = { GLX_RGBA, GLX_RED_SIZE, 8, GLX_GREEN_SIZE, 8, GLX_BLUE_SIZE, 8, #ifdef USE_DOUBLEBUFFER GLX_DOUBLEBUFFER, #endif None }; XVisualInfo *vi; GLXContext context; int major; int minor; int glx_GLX_EXT_swap_control; int glx_GLX_MESA_swap_control; int glx_GLX_SGI_swap_control; int glx_GLX_SGI_video_sync; if (!glXQueryVersion(XlibDisplay, &major, &minor)) { Error(_("video/glx: no GLX support\n")); GlxEnabled = 0; return; } Info(_("video/glx: glx version %d.%d\n"), major, minor); // // check which extension are supported // glx_GLX_EXT_swap_control = GlxIsExtensionSupported("GLX_EXT_swap_control"); glx_GLX_MESA_swap_control = GlxIsExtensionSupported("GLX_MESA_swap_control"); glx_GLX_SGI_swap_control = GlxIsExtensionSupported("GLX_SGI_swap_control"); glx_GLX_SGI_video_sync = GlxIsExtensionSupported("GLX_SGI_video_sync"); #ifdef GLX_MESA_swap_control if (glx_GLX_MESA_swap_control) { GlxSwapIntervalMESA = (PFNGLXSWAPINTERVALMESAPROC) glXGetProcAddress((const GLubyte *)"glXSwapIntervalMESA"); } Debug(3, "video/glx: GlxSwapIntervalMESA=%p\n", GlxSwapIntervalMESA); #endif #ifdef GLX_SGI_swap_control if (glx_GLX_SGI_swap_control) { GlxSwapIntervalSGI = (PFNGLXSWAPINTERVALSGIPROC) glXGetProcAddress((const GLubyte *)"glXSwapIntervalSGI"); } Debug(3, "video/glx: GlxSwapIntervalSGI=%p\n", GlxSwapIntervalSGI); #endif #ifdef GLX_SGI_video_sync if (glx_GLX_SGI_video_sync) { GlxGetVideoSyncSGI = (PFNGLXGETVIDEOSYNCSGIPROC) glXGetProcAddress((const GLubyte *)"glXGetVideoSyncSGI"); } Debug(3, "video/glx: 
GlxGetVideoSyncSGI=%p\n", GlxGetVideoSyncSGI); #endif // glXGetVideoSyncSGI glXWaitVideoSyncSGI #if 0 // FIXME: use xcb: xcb_glx_create_context #endif // create glx context glXMakeCurrent(XlibDisplay, None, NULL); vi = glXChooseVisual(XlibDisplay, DefaultScreen(XlibDisplay), visual_attr); if (!vi) { Error(_("video/glx: can't get a RGB visual\n")); GlxEnabled = 0; return; } if (!vi->visual) { Error(_("video/glx: no valid visual found\n")); GlxEnabled = 0; return; } if (vi->bits_per_rgb < 8) { Error(_("video/glx: need atleast 8-bits per RGB\n")); GlxEnabled = 0; return; } context = glXCreateContext(XlibDisplay, vi, NULL, GL_TRUE); if (!context) { Error(_("video/glx: can't create glx context\n")); GlxEnabled = 0; return; } GlxSharedContext = context; context = glXCreateContext(XlibDisplay, vi, GlxSharedContext, GL_TRUE); if (!context) { Error(_("video/glx: can't create glx context\n")); GlxEnabled = 0; glXDestroyContext(XlibDisplay, GlxSharedContext); GlxSharedContext = 0; return; } GlxContext = context; GlxVisualInfo = vi; Debug(3, "video/glx: visual %#02x depth %u\n", (unsigned)vi->visualid, vi->depth); // // query default v-sync state // if (glx_GLX_EXT_swap_control) { unsigned tmp; tmp = -1; glXQueryDrawable(XlibDisplay, DefaultRootWindow(XlibDisplay), GLX_SWAP_INTERVAL_EXT, &tmp); GlxCheck(); Debug(3, "video/glx: default v-sync is %d\n", tmp); } else { Debug(3, "video/glx: default v-sync is unknown\n"); } // // disable wait on v-sync // // FIXME: sleep before swap / busy waiting hardware // FIXME: 60hz lcd panel // FIXME: config: default, on, off #ifdef GLX_SGI_swap_control if (GlxVSyncEnabled < 0 && GlxSwapIntervalSGI) { if (GlxSwapIntervalSGI(0)) { GlxCheck(); Warning(_("video/glx: can't disable v-sync\n")); } else { Info(_("video/glx: v-sync disabled\n")); } } else #endif #ifdef GLX_MESA_swap_control if (GlxVSyncEnabled < 0 && GlxSwapIntervalMESA) { if (GlxSwapIntervalMESA(0)) { GlxCheck(); Warning(_("video/glx: can't disable v-sync\n")); } else { 
Info(_("video/glx: v-sync disabled\n")); } } #endif // // enable wait on v-sync // #ifdef GLX_SGI_swap_control if (GlxVSyncEnabled > 0 && GlxSwapIntervalMESA) { if (GlxSwapIntervalMESA(1)) { GlxCheck(); Warning(_("video/glx: can't enable v-sync\n")); } else { Info(_("video/glx: v-sync enabled\n")); } } else #endif #ifdef GLX_MESA_swap_control if (GlxVSyncEnabled > 0 && GlxSwapIntervalSGI) { if (GlxSwapIntervalSGI(1)) { GlxCheck(); Warning(_("video/glx: can't enable v-sync\n")); } else { Info(_("video/glx: v-sync enabled\n")); } } #endif } /// /// Cleanup GLX. /// static void GlxExit(void) { Debug(3, "video/glx: %s\n", __FUNCTION__); glFinish(); // must destroy glx if (glXGetCurrentContext() == GlxContext) { // if currently used, set to none glXMakeCurrent(XlibDisplay, None, NULL); } if (GlxSharedContext) { glXDestroyContext(XlibDisplay, GlxSharedContext); } if (GlxContext) { glXDestroyContext(XlibDisplay, GlxContext); } if (GlxThreadContext) { glXDestroyContext(XlibDisplay, GlxThreadContext); } // FIXME: must free GlxVisualInfo } #endif //---------------------------------------------------------------------------- // common functions //---------------------------------------------------------------------------- /// /// Calculate resolution group. /// /// @param width video picture raw width /// @param height video picture raw height /// @param interlace flag interlaced video picture /// /// @note interlace isn't used yet and probably wrong set by caller. 
/// static VideoResolutions VideoResolutionGroup(int width, int height, __attribute__ ((unused)) int interlace) { if (height <= 576) { return VideoResolution576i; } if (height <= 720) { return VideoResolution720p; } if (height < 1080) { return VideoResolutionFake1080i; } if (width < 1920) { return VideoResolutionFake1080i; } return VideoResolution1080i; } //---------------------------------------------------------------------------- // auto-crop //---------------------------------------------------------------------------- /// /// auto-crop context structure and typedef. /// typedef struct _auto_crop_ctx_ { int X1; ///< detected left border int X2; ///< detected right border int Y1; ///< detected top border int Y2; ///< detected bottom border int Count; ///< counter to delay switch int State; ///< auto-crop state (0, 14, 16) } AutoCropCtx; #ifdef USE_AUTOCROP #define YBLACK 0x20 ///< below is black #define UVBLACK 0x80 ///< around is black #define M64 UINT64_C(0x0101010101010101) ///< 64bit multiplicator /// auto-crop percent of video width to ignore logos static const int AutoCropLogoIgnore = 24; static int AutoCropInterval; ///< auto-crop check interval static int AutoCropDelay; ///< auto-crop switch delay static int AutoCropTolerance; ///< auto-crop tolerance /// /// Detect black line Y. /// /// @param data Y plane pixel data /// @param length number of pixel to check /// @param pitch offset of pixels /// /// @note 8 pixel are checked at once, all values must be 8 aligned /// static int AutoCropIsBlackLineY(const uint8_t * data, int length, int pitch) { int n; int o; uint64_t r; const uint64_t *p; #ifdef DEBUG if ((size_t) data & 0x7 || pitch & 0x7) { abort(); } #endif p = (const uint64_t *)data; n = length; // FIXME: can remove n o = pitch / 8; r = 0UL; while (--n >= 0) { r |= *p; p += o; } // below YBLACK(0x20) is black return !(r & ~((YBLACK - 1) * M64)); } /// /// Auto detect black borders and crop them. 
/// /// @param autocrop auto-crop variables /// @param width frame width in pixel /// @param height frame height in pixel /// @param data frame planes data (Y, U, V) /// @param pitches frame planes pitches (Y, U, V) /// /// @note FIXME: can reduce the checked range, left, right crop isn't /// used yet. /// /// @note FIXME: only Y is checked, for black. /// static void AutoCropDetect(AutoCropCtx * autocrop, int width, int height, void *data[3], uint32_t pitches[3]) { const void *data_y; unsigned length_y; int x; int y; int x1; int x2; int y1; int y2; int logo_skip; // // ignore top+bottom 6 lines and left+right 8 pixels // #define SKIP_X 8 #define SKIP_Y 6 x1 = width - 1; x2 = 0; y1 = height - 1; y2 = 0; logo_skip = SKIP_X + (((width * AutoCropLogoIgnore) / 100 + 8) / 8) * 8; data_y = data[0]; length_y = pitches[0]; // // search top // for (y = SKIP_Y; y < y1; ++y) { if (!AutoCropIsBlackLineY(data_y + logo_skip + y * length_y, (width - 2 * logo_skip) / 8, 8)) { if (y == SKIP_Y) { y = 0; } y1 = y; break; } } // // search bottom // for (y = height - SKIP_Y - 1; y > y2; --y) { if (!AutoCropIsBlackLineY(data_y + logo_skip + y * length_y, (width - 2 * logo_skip) / 8, 8)) { if (y == height - SKIP_Y - 1) { y = height - 1; } y2 = y; break; } } // // search left // for (x = SKIP_X; x < x1; x += 8) { if (!AutoCropIsBlackLineY(data_y + x + SKIP_Y * length_y, height - 2 * SKIP_Y, length_y)) { if (x == SKIP_X) { x = 0; } x1 = x; break; } } // // search right // for (x = width - SKIP_X - 8; x > x2; x -= 8) { if (!AutoCropIsBlackLineY(data_y + x + SKIP_Y * length_y, height - 2 * SKIP_Y * 8, length_y)) { if (x == width - SKIP_X - 8) { x = width - 1; } x2 = x; break; } } if (0 && (y1 > SKIP_Y || x1 > SKIP_X)) { Debug(3, "video/autocrop: top=%d bottom=%d left=%d right=%d\n", y1, y2, x1, x2); } autocrop->X1 = x1; autocrop->X2 = x2; autocrop->Y1 = y1; autocrop->Y2 = y2; } #endif //---------------------------------------------------------------------------- // software - deinterlace 
//----------------------------------------------------------------------------

// FIXME: move general software deinterlace functions to here.

//----------------------------------------------------------------------------
//  VA-API
//----------------------------------------------------------------------------

#ifdef USE_VAAPI

static char VaapiBuggyXvBA;             ///< fix xvba-video bugs
static char VaapiBuggyVdpau;            ///< fix libva-driver-vdpau bugs
static char VaapiBuggyIntel;            ///< fix libva-driver-intel bugs

static VADisplay *VaDisplay;            ///< VA-API display

static VAImage VaOsdImage = {
    .image_id = VA_INVALID_ID
};                                      ///< osd VA-API image

static VASubpictureID VaOsdSubpicture = VA_INVALID_ID;  ///< osd VA-API subpicture
static char VaapiUnscaledOsd;           ///< unscaled osd supported

#if VA_CHECK_VERSION(0,33,99)
static char VaapiVideoProcessing;       ///< supports video processing
#endif

    /// VA-API decoder typedef
typedef struct _vaapi_decoder_ VaapiDecoder;

///
/// VA-API decoder
///
struct _vaapi_decoder_ {
    VADisplay *VaDisplay;               ///< VA-API display

    xcb_window_t Window;                ///< output window
    int VideoX;                         ///< video base x coordinate
    int VideoY;                         ///< video base y coordinate
    int VideoWidth;                     ///< video base width
    int VideoHeight;                    ///< video base height

    int OutputX;                        ///< real video output x coordinate
    int OutputY;                        ///< real video output y coordinate
    int OutputWidth;                    ///< real video output width
    int OutputHeight;                   ///< real video output height

    /// flags for put surface for different resolutions groups
    unsigned SurfaceFlagsTable[VideoResolutionMax];

    enum PixelFormat PixFmt;            ///< ffmpeg frame pixfmt
    int WrongInterlacedWarned;          ///< warning about interlace flag issued
    int Interlaced;                     ///< ffmpeg interlaced flag
    int TopFieldFirst;                  ///< ffmpeg top field displayed first

    VAImage DeintImages[5];             ///< deinterlace image buffers

    int GetPutImage;                    ///< flag get/put image can be used
    VAImage Image[1];                   ///< image buffer to update surface

    VAProfile Profile;                  ///< VA-API profile
    VAEntrypoint Entrypoint;            ///< VA-API entrypoint
    struct vaapi_context VaapiContext[1];   ///< ffmpeg VA-API context

    int SurfacesNeeded;                 ///< number of surface to request
    int SurfaceUsedN;                   ///< number of used surfaces
    /// used surface ids
    VASurfaceID SurfacesUsed[CODEC_SURFACES_MAX];
    int SurfaceFreeN;                   ///< number of free surfaces
    /// free surface ids
    VASurfaceID SurfacesFree[CODEC_SURFACES_MAX];

    int InputWidth;                     ///< video input width
    int InputHeight;                    ///< video input height
    AVRational InputAspect;             ///< video input aspect ratio
    VideoResolutions Resolution;        ///< resolution group

    int CropX;                          ///< video crop x
    int CropY;                          ///< video crop y
    int CropWidth;                      ///< video crop width
    int CropHeight;                     ///< video crop height

#ifdef USE_AUTOCROP
    AutoCropCtx AutoCrop[1];            ///< auto-crop variables
#endif
#ifdef USE_GLX
    GLuint GlTextures[2];               ///< gl texture for VA-API
    void *GlxSurfaces[2];               ///< VA-API/GLX surface
#endif

    VASurfaceID BlackSurface;           ///< empty black surface

    /// video surface ring buffer
    VASurfaceID SurfacesRb[VIDEO_SURFACES_MAX];
#ifdef VA_EXP
    VASurfaceID LastSurface;            ///< last surface
#endif
    int SurfaceWrite;                   ///< write pointer
    int SurfaceRead;                    ///< read pointer
    atomic_t SurfacesFilled;            ///< how many of the buffer is used

    int SurfaceField;                   ///< current displayed field
    int TrickSpeed;                     ///< current trick speed
    int TrickCounter;                   ///< current trick speed counter
    struct timespec FrameTime;          ///< time of last display
    VideoStream *Stream;                ///< video stream
    int Closing;                        ///< flag about closing current stream
    int SyncOnAudio;                    ///< flag sync to audio
    int64_t PTS;                        ///< video PTS clock

    int LastAVDiff;                     ///< last audio - video difference
    int SyncCounter;                    ///< counter to sync frames
    int StartCounter;                   ///< counter for video start
    int FramesDuped;                    ///< number of frames duplicated
    int FramesMissed;                   ///< number of frames missed
    int FramesDropped;                  ///< number of frames dropped
    int FrameCounter;                   ///< number of frames decoded
    int FramesDisplayed;                ///< number of frames displayed
};

static VaapiDecoder *VaapiDecoders[1];  ///< open
decoder streams static int VaapiDecoderN; ///< number of decoder streams /// forward display back surface static void VaapiBlackSurface(VaapiDecoder *); /// forward destroy deinterlace images static void VaapiDestroyDeinterlaceImages(VaapiDecoder *); /// forward definition release surface static void VaapiReleaseSurface(VaapiDecoder *, VASurfaceID); //---------------------------------------------------------------------------- // VA-API Functions //---------------------------------------------------------------------------- //---------------------------------------------------------------------------- /// /// Output video messages. /// /// Reduce output. /// /// @param level message level (Error, Warning, Info, Debug, ...) /// @param format printf format string (NULL to flush messages) /// @param ... printf arguments /// /// @returns true, if message shown /// /// @todo FIXME: combine VdpauMessage and VaapiMessage /// static int VaapiMessage(int level, const char *format, ...) { if (SysLogLevel > level || DebugLevel > level) { static const char *last_format; static char buf[256]; va_list ap; va_start(ap, format); if (format != last_format) { // don't repeat same message if (buf[0]) { // print last repeated message syslog(LOG_ERR, "%s", buf); buf[0] = '\0'; } if (format) { last_format = format; vsyslog(LOG_ERR, format, ap); } va_end(ap); return 1; } vsnprintf(buf, sizeof(buf), format, ap); va_end(ap); } return 0; } // Surfaces ------------------------------------------------------------- /// /// Associate OSD with surface. 
/// /// @param decoder VA-API decoder /// static void VaapiAssociate(VaapiDecoder * decoder) { int x; int y; int w; int h; if (VaOsdSubpicture == VA_INVALID_ID) { Warning(_("video/vaapi: no osd subpicture yet\n")); return; } x = 0; y = 0; w = VaOsdImage.width; h = VaOsdImage.height; // FIXME: associate only if osd is displayed if (VaapiUnscaledOsd) { if (decoder->SurfaceFreeN && vaAssociateSubpicture(VaDisplay, VaOsdSubpicture, decoder->SurfacesFree, decoder->SurfaceFreeN, x, y, w, h, 0, 0, VideoWindowWidth, VideoWindowHeight, VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't associate subpicture\n")); } if (decoder->SurfaceUsedN && vaAssociateSubpicture(VaDisplay, VaOsdSubpicture, decoder->SurfacesUsed, decoder->SurfaceUsedN, x, y, w, h, 0, 0, VideoWindowWidth, VideoWindowHeight, VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't associate subpicture\n")); } } else { if (decoder->SurfaceFreeN && vaAssociateSubpicture(VaDisplay, VaOsdSubpicture, decoder->SurfacesFree, decoder->SurfaceFreeN, x, y, w, h, decoder->CropX, decoder->CropY / 2, decoder->CropWidth, decoder->CropHeight, 0) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't associate subpicture\n")); } if (decoder->SurfaceUsedN && vaAssociateSubpicture(VaDisplay, VaOsdSubpicture, decoder->SurfacesUsed, decoder->SurfaceUsedN, x, y, w, h, decoder->CropX, decoder->CropY / 2, decoder->CropWidth, decoder->CropHeight, 0) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't associate subpicture\n")); } } } /// /// Deassociate OSD with surface. 
/// /// @param decoder VA-API decoder /// static void VaapiDeassociate(VaapiDecoder * decoder) { if (VaOsdSubpicture != VA_INVALID_ID) { if (decoder->SurfaceFreeN && vaDeassociateSubpicture(VaDisplay, VaOsdSubpicture, decoder->SurfacesFree, decoder->SurfaceFreeN) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't deassociate %d surfaces\n"), decoder->SurfaceFreeN); } if (decoder->SurfaceUsedN && vaDeassociateSubpicture(VaDisplay, VaOsdSubpicture, decoder->SurfacesUsed, decoder->SurfaceUsedN) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't deassociate %d surfaces\n"), decoder->SurfaceUsedN); } } } /// /// Create surfaces for VA-API decoder. /// /// @param decoder VA-API decoder /// @param width surface source/video width /// @param height surface source/video height /// static void VaapiCreateSurfaces(VaapiDecoder * decoder, int width, int height) { #ifdef DEBUG if (!decoder->SurfacesNeeded) { Error(_("video/vaapi: surface needed not set\n")); decoder->SurfacesNeeded = 3 + VIDEO_SURFACES_MAX; } #endif Debug(3, "video/vaapi: %s: %dx%d * %d\n", __FUNCTION__, width, height, decoder->SurfacesNeeded); decoder->SurfaceFreeN = decoder->SurfacesNeeded; // VA_RT_FORMAT_YUV420 VA_RT_FORMAT_YUV422 VA_RT_FORMAT_YUV444 if (vaCreateSurfaces(decoder->VaDisplay, VA_RT_FORMAT_YUV420, width, height, decoder->SurfacesFree, decoder->SurfaceFreeN, NULL, 0) != VA_STATUS_SUCCESS) { Fatal(_("video/vaapi: can't create %d surfaces\n"), decoder->SurfaceFreeN); // FIXME: write error handler / fallback } } /// /// Destroy surfaces of VA-API decoder. 
/// /// @param decoder VA-API decoder /// static void VaapiDestroySurfaces(VaapiDecoder * decoder) { Debug(3, "video/vaapi: %s:\n", __FUNCTION__); // // update OSD associate // VaapiDeassociate(decoder); if (vaDestroySurfaces(decoder->VaDisplay, decoder->SurfacesFree, decoder->SurfaceFreeN) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't destroy %d surfaces\n"), decoder->SurfaceFreeN); } decoder->SurfaceFreeN = 0; if (vaDestroySurfaces(decoder->VaDisplay, decoder->SurfacesUsed, decoder->SurfaceUsedN) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't destroy %d surfaces\n"), decoder->SurfaceUsedN); } decoder->SurfaceUsedN = 0; // FIXME surfaces used for output } /// /// Get a free surface. /// /// @param decoder VA-API decoder /// /// @returns the oldest free surface /// static VASurfaceID VaapiGetSurface0(VaapiDecoder * decoder) { VASurfaceID surface; VASurfaceStatus status; int i; // try to use oldest surface for (i = 0; i < decoder->SurfaceFreeN; ++i) { surface = decoder->SurfacesFree[i]; if (vaQuerySurfaceStatus(decoder->VaDisplay, surface, &status) != VA_STATUS_SUCCESS) { // this fails with XvBA und mpeg softdecoder if (!VaapiBuggyXvBA) { Error(_("video/vaapi: vaQuerySurface failed\n")); } status = VASurfaceReady; } // surface still in use, try next if (status != VASurfaceReady) { Debug(4, "video/vaapi: surface %#010x not ready: %d\n", surface, status); if (!VaapiBuggyVdpau || i < 1) { continue; } usleep(1 * 1000); } // copy remaining surfaces down decoder->SurfaceFreeN--; for (; i < decoder->SurfaceFreeN; ++i) { decoder->SurfacesFree[i] = decoder->SurfacesFree[i + 1]; } decoder->SurfacesFree[i] = VA_INVALID_ID; // save as used decoder->SurfacesUsed[decoder->SurfaceUsedN++] = surface; return surface; } Error(_("video/vaapi: out of surfaces\n")); return VA_INVALID_ID; } /// /// Release a surface. 
///	Move a surface from the used list back to the free list.
///
///	@param decoder	VA-API decoder
///	@param surface	surface no longer used
///
static void VaapiReleaseSurface(VaapiDecoder * decoder, VASurfaceID surface)
{
    int i;

    for (i = 0; i < decoder->SurfaceUsedN; ++i) {
	if (decoder->SurfacesUsed[i] == surface) {
	    // no problem, with last used
	    // (swap-remove: overwrite slot with last used entry)
	    decoder->SurfacesUsed[i] =
		decoder->SurfacesUsed[--decoder->SurfaceUsedN];
	    decoder->SurfacesFree[decoder->SurfaceFreeN++] = surface;
	    return;
	}
    }
    Error(_("video/vaapi: release surface %#010x, which is not in use\n"),
	surface);
}

// Init/Exit ------------------------------------------------------------

///
///	Debug VA-API decoder frames drop...
///
///	@param decoder	video hardware decoder
///
static void VaapiPrintFrames(const VaapiDecoder * decoder)
{
    Debug(3, "video/vaapi: %d missed, %d duped, %d dropped frames of %d,%d\n",
	decoder->FramesMissed, decoder->FramesDuped, decoder->FramesDropped,
	decoder->FrameCounter, decoder->FramesDisplayed);
#ifndef DEBUG
    (void)decoder;			// silence unused warning without DEBUG
#endif
}

///
///	Initialize surface flags.
///
///	Builds the per-resolution-group vaPutSurface flag table from the
///	configured color space, scaling and deinterlace settings.
///
///	@param decoder	video hardware decoder
///
static void VaapiInitSurfaceFlags(VaapiDecoder * decoder)
{
    int i;

    for (i = 0; i < VideoResolutionMax; ++i) {
	decoder->SurfaceFlagsTable[i] = VA_CLEAR_DRAWABLE;
	// color space conversion none, ITU-R BT.601, ITU-R BT.709, ...
	switch (VideoColorSpaces[i]) {
	    case VideoColorSpaceNone:
		break;
	    case VideoColorSpaceBt601:
		decoder->SurfaceFlagsTable[i] |= VA_SRC_BT601;
		break;
	    case VideoColorSpaceBt709:
		decoder->SurfaceFlagsTable[i] |= VA_SRC_BT709;
		break;
	    case VideoColorSpaceSmpte240:
		decoder->SurfaceFlagsTable[i] |= VA_SRC_SMPTE_240;
		break;
	}

	// scaling flags FAST, HQ, NL_ANAMORPHIC
	switch (VideoScaling[i]) {
	    case VideoScalingNormal:
		decoder->SurfaceFlagsTable[i] |= VA_FILTER_SCALING_DEFAULT;
		break;
	    case VideoScalingFast:
		decoder->SurfaceFlagsTable[i] |= VA_FILTER_SCALING_FAST;
		break;
	    case VideoScalingHQ:
		// vdpau backend supports only VA_FILTER_SCALING_HQ
		// vdpau backend with advanced deinterlacer and my GT-210
		// is too slow
		decoder->SurfaceFlagsTable[i] |= VA_FILTER_SCALING_HQ;
		break;
	    case VideoScalingAnamorphic:
		// intel backend supports only VA_FILTER_SCALING_NL_ANAMORPHIC;
		// FIXME: Highlevel should display 4:3 as 16:9 to support this
		decoder->SurfaceFlagsTable[i] |=
		    VA_FILTER_SCALING_NL_ANAMORPHIC;
		break;
	}

	// deinterlace flags (not yet supported by libva)
	switch (VideoDeinterlace[i]) {
	    case VideoDeinterlaceBob:
		break;
	    case VideoDeinterlaceWeave:
		break;
	    case VideoDeinterlaceTemporal:
		//FIXME: private hack
		//decoder->SurfaceFlagsTable[i] |= 0x00002000;
		break;
	    case VideoDeinterlaceTemporalSpatial:
		//FIXME: private hack
		//decoder->SurfaceFlagsTable[i] |= 0x00006000;
		break;
	    default:
		break;
	}
    }
}

///
///	Allocate new VA-API decoder.
///
///	@returns a new prepared VA-API hardware decoder.
///
static VaapiDecoder *VaapiNewHwDecoder(VideoStream * stream)
{
    VaapiDecoder *decoder;
    int i;

    // only a single decoder stream is supported (array size is 1)
    if (VaapiDecoderN == 1) {
	Fatal(_("video/vaapi: out of decoders\n"));
    }

    if (!(decoder = calloc(1, sizeof(*decoder)))) {
	Fatal(_("video/vaapi: out of memory\n"));
    }
    decoder->VaDisplay = VaDisplay;
    decoder->Window = VideoWindow;
    decoder->VideoX = 0;
    decoder->VideoY = 0;
    decoder->VideoWidth = VideoWindowWidth;
    decoder->VideoHeight = VideoWindowHeight;

    VaapiInitSurfaceFlags(decoder);

    // mark all image/surface handles invalid until really created
    decoder->DeintImages[0].image_id = VA_INVALID_ID;
    decoder->DeintImages[1].image_id = VA_INVALID_ID;
    decoder->DeintImages[2].image_id = VA_INVALID_ID;
    decoder->DeintImages[3].image_id = VA_INVALID_ID;
    decoder->DeintImages[4].image_id = VA_INVALID_ID;

    decoder->Image->image_id = VA_INVALID_ID;

    for (i = 0; i < CODEC_SURFACES_MAX; ++i) {
	decoder->SurfacesUsed[i] = VA_INVALID_ID;
	decoder->SurfacesFree[i] = VA_INVALID_ID;
    }

    // setup video surface ring buffer
    atomic_set(&decoder->SurfacesFilled, 0);

    for (i = 0; i < VIDEO_SURFACES_MAX; ++i) {
	decoder->SurfacesRb[i] = VA_INVALID_ID;
    }
#ifdef VA_EXP
    decoder->LastSurface = VA_INVALID_ID;
#endif
    decoder->BlackSurface = VA_INVALID_ID;

    //
    //	Setup ffmpeg vaapi context
    //
    decoder->Profile = VA_INVALID_ID;
    decoder->Entrypoint = VA_INVALID_ID;
    decoder->VaapiContext->display = VaDisplay;
    decoder->VaapiContext->config_id = VA_INVALID_ID;
    decoder->VaapiContext->context_id = VA_INVALID_ID;

#ifdef USE_GLX
    decoder->GlxSurfaces[0] = NULL;
    decoder->GlxSurfaces[1] = NULL;
    if (GlxEnabled) {
	// FIXME: create GLX context here
    }
#endif

    decoder->OutputWidth = VideoWindowWidth;
    decoder->OutputHeight = VideoWindowHeight;

    decoder->PixFmt = PIX_FMT_NONE;

    decoder->Stream = stream;
    if (!VaapiDecoderN) {		// FIXME: hack sync on audio
	decoder->SyncOnAudio = 1;
    }
    decoder->Closing = -300 - 1;

    decoder->PTS = AV_NOPTS_VALUE;

    // old va-api intel driver didn't supported get/put-image.
#if VA_CHECK_VERSION(0,33,99)
    // FIXME: not the exact version with support
    decoder->GetPutImage = 1;
#else
    decoder->GetPutImage = !VaapiBuggyIntel;
#endif

    VaapiDecoders[VaapiDecoderN++] = decoder;

    return decoder;
}

///
///	Cleanup VA-API.
///
///	Flushes the output ring buffer, destroys the image, context,
///	config, surfaces and deinterlace images, and resets the decoder
///	counters for a new stream.
///
///	@param decoder	va-api hw decoder
///
static void VaapiCleanup(VaapiDecoder * decoder)
{
    int filled;
    VASurfaceID surface;
    int i;

    // flush output queue, only 1-2 frames buffered, no big loss
    while ((filled = atomic_read(&decoder->SurfacesFilled))) {
	decoder->SurfaceRead = (decoder->SurfaceRead + 1) % VIDEO_SURFACES_MAX;
	atomic_dec(&decoder->SurfacesFilled);

	surface = decoder->SurfacesRb[decoder->SurfaceRead];
	if (surface == VA_INVALID_ID) {
	    Error(_("video/vaapi: invalid surface in ringbuffer\n"));
	    continue;
	}
	// can crash and hang (deliberately disabled with `0 &&`)
	if (0 && vaSyncSurface(decoder->VaDisplay, surface)
	    != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: vaSyncSurface failed\n"));
	}
    }

#ifdef DEBUG
    if (decoder->SurfaceRead != decoder->SurfaceWrite) {
	abort();
    }
#endif

    // clear ring buffer
    for (i = 0; i < VIDEO_SURFACES_MAX; ++i) {
	decoder->SurfacesRb[i] = VA_INVALID_ID;
    }
#ifdef VA_EXP
    decoder->LastSurface = VA_INVALID_ID;
#endif

    decoder->WrongInterlacedWarned = 0;

    // cleanup image
    if (decoder->Image->image_id != VA_INVALID_ID) {
	if (vaDestroyImage(VaDisplay,
		decoder->Image->image_id) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't destroy image!\n"));
	}
	decoder->Image->image_id = VA_INVALID_ID;
    }
    // cleanup context and config
    if (decoder->VaapiContext) {
	if (decoder->VaapiContext->context_id != VA_INVALID_ID) {
	    if (vaDestroyContext(VaDisplay,
		    decoder->VaapiContext->context_id) != VA_STATUS_SUCCESS) {
		Error(_("video/vaapi: can't destroy context!\n"));
	    }
	    decoder->VaapiContext->context_id = VA_INVALID_ID;
	}
	if (decoder->VaapiContext->config_id != VA_INVALID_ID) {
	    if (vaDestroyConfig(VaDisplay,
		    decoder->VaapiContext->config_id) != VA_STATUS_SUCCESS) {
		Error(_("video/vaapi: can't destroy config!\n"));
	    }
	    decoder->VaapiContext->config_id = VA_INVALID_ID;
} } // cleanup surfaces if (decoder->SurfaceFreeN || decoder->SurfaceUsedN) { VaapiDestroySurfaces(decoder); } // cleanup images if (decoder->DeintImages[0].image_id != VA_INVALID_ID) { VaapiDestroyDeinterlaceImages(decoder); } decoder->SurfaceRead = 0; decoder->SurfaceWrite = 0; decoder->SurfaceField = 0; decoder->SyncCounter = 0; decoder->FrameCounter = 0; decoder->FramesDisplayed = 0; decoder->StartCounter = 0; decoder->Closing = 0; decoder->PTS = AV_NOPTS_VALUE; VideoDeltaPTS = 0; } /// /// Destroy a VA-API decoder. /// /// @param decoder VA-API decoder /// static void VaapiDelHwDecoder(VaapiDecoder * decoder) { int i; for (i = 0; i < VaapiDecoderN; ++i) { if (VaapiDecoders[i] == decoder) { VaapiDecoders[i] = NULL; VaapiDecoderN--; // FIXME: must copy last slot into empty slot and -- break; } } VaapiCleanup(decoder); if (decoder->BlackSurface != VA_INVALID_ID) { // // update OSD associate // if (VaOsdSubpicture != VA_INVALID_ID) { if (vaDeassociateSubpicture(VaDisplay, VaOsdSubpicture, &decoder->BlackSurface, 1) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't deassociate black surfaces\n")); } } if (vaDestroySurfaces(decoder->VaDisplay, &decoder->BlackSurface, 1) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't destroy a surface\n")); } } #ifdef USE_GLX if (decoder->GlxSurfaces[0]) { if (vaDestroySurfaceGLX(VaDisplay, decoder->GlxSurfaces[0]) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't destroy glx surface!\n")); } decoder->GlxSurfaces[0] = NULL; } if (decoder->GlxSurfaces[1]) { if (vaDestroySurfaceGLX(VaDisplay, decoder->GlxSurfaces[1]) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't destroy glx surface!\n")); } decoder->GlxSurfaces[0] = NULL; } if (decoder->GlTextures[0]) { glDeleteTextures(2, decoder->GlTextures); } #endif VaapiPrintFrames(decoder); free(decoder); } #ifdef DEBUG // currently unused, keep it for later static VAProfile VaapiFindProfile(const VAProfile * profiles, unsigned n, VAProfile profile); static VAEntrypoint 
VaapiFindEntrypoint(const VAEntrypoint * entrypoints, unsigned n,
    VAEntrypoint entrypoint);

///
///	1080i
///
///	DEBUG-only benchmark: creates an H.264 high-profile decode
///	pipeline with 32 surfaces and times vaPutSurface for both fields.
///
static void Vaapi1080i(void)
{
    VAProfile profiles[vaMaxNumProfiles(VaDisplay)];
    int profile_n;
    VAEntrypoint entrypoints[vaMaxNumEntrypoints(VaDisplay)];
    int entrypoint_n;
    int p;
    int e;
    VAConfigAttrib attrib;
    VAConfigID config_id;
    VAContextID context_id;
    VASurfaceID surfaces[32];
    VAImage image[1];
    int n;
    uint32_t start_tick;
    uint32_t tick;

    p = -1;
    e = -1;

    // prepare va-api profiles
    if (vaQueryConfigProfiles(VaDisplay, profiles, &profile_n)) {
	Error(_("codec: vaQueryConfigProfiles failed"));
	return;
    }
    // check profile
    p = VaapiFindProfile(profiles, profile_n, VAProfileH264High);
    if (p == -1) {
	Debug(3, "\tno profile found\n");
	return;
    }
    // prepare va-api entry points
    if (vaQueryConfigEntrypoints(VaDisplay, p, entrypoints, &entrypoint_n)) {
	Error(_("codec: vaQueryConfigEntrypoints failed"));
	return;
    }
    e = VaapiFindEntrypoint(entrypoints, entrypoint_n, VAEntrypointVLD);
    if (e == -1) {
	Warning(_("codec: unsupported: slow path\n"));
	return;
    }

    memset(&attrib, 0, sizeof(attrib));
    attrib.type = VAConfigAttribRTFormat;
    attrib.value = VA_RT_FORMAT_YUV420;
    // create a configuration for the decode pipeline
    if (vaCreateConfig(VaDisplay, p, e, &attrib, 1, &config_id)) {
	Error(_("codec: can't create config"));
	return;
    }
    if (vaCreateSurfaces(VaDisplay, VA_RT_FORMAT_YUV420, 1920, 1080, surfaces,
	    32, NULL, 0) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't create surfaces\n"));
	return;
    }
    // bind surfaces to context
    if (vaCreateContext(VaDisplay, config_id, 1920, 1080, VA_PROGRESSIVE,
	    surfaces, 32, &context_id)) {
	Error(_("codec: can't create context"));
	return;
    }
#if 1					// without this 1080i will crash
    image->image_id = VA_INVALID_ID;
    if (vaDeriveImage(VaDisplay, surfaces[0], image) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: vaDeriveImage failed\n"));
    }
    if (image->image_id != VA_INVALID_ID) {
	if (vaDestroyImage(VaDisplay, image->image_id) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't destroy image!\n"));
	}
    }
#else
    vaBeginPicture(VaDisplay, context_id, surfaces[0]);
    vaRenderPicture(VaDisplay, context_id, NULL, 0);
    // aborts without valid buffers upload
    vaEndPicture(VaDisplay, context_id);
#endif

    start_tick = GetMsTicks();
    // NOTE(review): loop runs exactly once (n = 1; n < 2); the %10
    // progress print never triggers — looks like a reduced benchmark
    for (n = 1; n < 2; ++n) {
	if (vaPutSurface(VaDisplay, surfaces[0], VideoWindow,
		// decoder src
		0, 0, 1920, 1080,
		// video dst
		0, 0, 1920, 1080, NULL, 0,
		VA_TOP_FIELD | VA_CLEAR_DRAWABLE) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: vaPutSurface failed\n"));
	}
	if (vaPutSurface(VaDisplay, surfaces[0], VideoWindow,
		// decoder src
		0, 0, 1920, 1080,
		// video dst
		0, 0, 1920, 1080, NULL, 0,
		VA_BOTTOM_FIELD | VA_CLEAR_DRAWABLE) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: vaPutSurface failed\n"));
	}
	tick = GetMsTicks();
	if (!(n % 10)) {
	    fprintf(stderr, "%dms / frame\n", (tick - start_tick) / n);
	}
    }

    // destroy the stuff.
    if (vaDestroyContext(VaDisplay, context_id) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't destroy context!\n"));
    }
    if (vaDestroySurfaces(VaDisplay, surfaces, 32) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't destroy surfaces\n"));
    }
    if (vaDestroyConfig(VaDisplay, config_id) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't destroy config!\n"));
    }
    fprintf(stderr, "done\n");
}

#endif

///
///	VA-API setup.
///
///	@param display_name	x11/xcb display name
///
///	@returns true if VA-API could be initialized, false otherwise.
/// static int VaapiInit(const char *display_name) { int major; int minor; VADisplayAttribute attr; const char *s; VaOsdImage.image_id = VA_INVALID_ID; VaOsdSubpicture = VA_INVALID_ID; #ifdef USE_GLX if (GlxEnabled) { // support glx VaDisplay = vaGetDisplayGLX(XlibDisplay); } else #endif { VaDisplay = vaGetDisplay(XlibDisplay); } if (!VaDisplay) { Error(_("video/vaapi: Can't connect VA-API to X11 server on '%s'\n"), display_name); return 0; } // XvBA needs this: setenv("DISPLAY", display_name, 1); if (vaInitialize(VaDisplay, &major, &minor) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: Can't inititialize VA-API on '%s'\n"), display_name); vaTerminate(VaDisplay); VaDisplay = NULL; return 0; } s = vaQueryVendorString(VaDisplay); Info(_("video/vaapi: libva %d.%d (%s) initialized\n"), major, minor, s); // // Setup fixes for driver bugs. // if (strstr(s, "VDPAU")) { Info(_("video/vaapi: use vdpau bug workaround\n")); setenv("VDPAU_VIDEO_PUTSURFACE_FAST", "0", 0); VaapiBuggyVdpau = 1; } if (strstr(s, "XvBA")) { VaapiBuggyXvBA = 1; } if (strstr(s, "Intel i965")) { VaapiBuggyIntel = 1; } // // check which attributes are supported // attr.type = VADisplayAttribBackgroundColor; attr.flags = VA_DISPLAY_ATTRIB_SETTABLE; if (vaGetDisplayAttributes(VaDisplay, &attr, 1) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: Can't get background-color attribute\n")); attr.value = 1; } Info(_("video/vaapi: background-color is %s\n"), attr.value ? 
_("supported") : _("unsupported")); // FIXME: VaapiSetBackground(VideoBackground); #if 0 // // check the chroma format // attr.type = VAConfigAttribRTFormat attr.flags = VA_DISPLAY_ATTRIB_GETTABLE; Vaapi1080i(); #endif #if VA_CHECK_VERSION(0,33,99) // // check vpp support // if (1) { VAEntrypoint entrypoints[vaMaxNumEntrypoints(VaDisplay)]; int entrypoint_n; int i; VaapiVideoProcessing = 0; if (!vaQueryConfigEntrypoints(VaDisplay, VAProfileNone, entrypoints, &entrypoint_n)) { for (i = 0; i < entrypoint_n; i++) { if (entrypoints[i] == VAEntrypointVideoProc) { Info("video/vaapi: supports video processing\n"); VaapiVideoProcessing = 1; break; } } } } #endif return 1; } #ifdef USE_GLX /// /// VA-API GLX setup. /// /// @param display_name x11/xcb display name /// /// @returns true if VA-API could be initialized, false otherwise. /// static int VaapiGlxInit(const char *display_name) { GlxEnabled = 1; GlxInit(); if (GlxEnabled) { GlxSetupWindow(VideoWindow, VideoWindowWidth, VideoWindowHeight, GlxContext); } if (!GlxEnabled) { Error(_("video/glx: glx error\n")); } return VaapiInit(display_name); } #endif /// /// VA-API cleanup /// static void VaapiExit(void) { int i; // FIXME: more VA-API cleanups... for (i = 0; i < VaapiDecoderN; ++i) { if (VaapiDecoders[i]) { VaapiDelHwDecoder(VaapiDecoders[i]); VaapiDecoders[i] = NULL; } } VaapiDecoderN = 0; if (!VaDisplay) { vaTerminate(VaDisplay); VaDisplay = NULL; } } //---------------------------------------------------------------------------- /// /// Update output for new size or aspect ratio. 
///	Recomputes output and crop rectangles from the current input
///	size/aspect and resets the auto-crop state.
///
///	@param decoder	VA-API decoder
///
static void VaapiUpdateOutput(VaapiDecoder * decoder)
{
    VideoUpdateOutput(decoder->InputAspect, decoder->InputWidth,
	decoder->InputHeight, decoder->Resolution, decoder->VideoX,
	decoder->VideoY, decoder->VideoWidth, decoder->VideoHeight,
	&decoder->OutputX, &decoder->OutputY, &decoder->OutputWidth,
	&decoder->OutputHeight, &decoder->CropX, &decoder->CropY,
	&decoder->CropWidth, &decoder->CropHeight);
#ifdef USE_AUTOCROP
    decoder->AutoCrop->State = 0;
    decoder->AutoCrop->Count = AutoCropDelay;
#endif
}

///
///	Find VA-API image format.
///
///	@param decoder	VA-API decoder
///	@param pix_fmt	ffmpeg pixel format
///	@param[out] format	image format
///
///	FIXME: can fallback from I420 to YV12, if not supported
///	FIXME: must check if put/get with this format is supported (see intel)
///
static int VaapiFindImageFormat(VaapiDecoder * decoder,
    enum PixelFormat pix_fmt, VAImageFormat * format)
{
    VAImageFormat *imgfrmts;
    int imgfrmt_n;
    int i;
    unsigned fourcc;

    switch (pix_fmt) {			// convert ffmpeg to VA-API
	    // NV12, YV12, I420, BGRA
	    // intel: I420 is native format for MPEG-2 decoded surfaces
	    // intel: NV12 is native format for H.264 decoded surfaces
	case PIX_FMT_YUV420P:
	case PIX_FMT_YUVJ420P:
	    // fourcc = VA_FOURCC_YV12; // YVU
	    fourcc = VA_FOURCC('I', '4', '2', '0');	// YUV
	    break;
	case PIX_FMT_NV12:
	    fourcc = VA_FOURCC_NV12;
	    break;
	default:
	    Fatal(_("video/vaapi: unsupported pixel format %d\n"), pix_fmt);
    }

    imgfrmt_n = vaMaxNumImageFormats(decoder->VaDisplay);
    imgfrmts = alloca(imgfrmt_n * sizeof(*imgfrmts));

    if (vaQueryImageFormats(decoder->VaDisplay, imgfrmts, &imgfrmt_n)
	!= VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: vaQueryImageFormats failed\n"));
	return 0;
    }
    Debug(3, "video/vaapi: search format %c%c%c%c in %d image formats\n",
	fourcc, fourcc >> 8, fourcc >> 16, fourcc >> 24, imgfrmt_n);
    Debug(3, "video/vaapi: supported image formats:\n");
    for (i = 0; i < imgfrmt_n; ++i) {
	Debug(3, "video/vaapi:\t%c%c%c%c\t%d\n", imgfrmts[i].fourcc,
	    imgfrmts[i].fourcc >> 8, imgfrmts[i].fourcc >> 16,
	    imgfrmts[i].fourcc >> 24, imgfrmts[i].depth);
    }
    //
    //	search image format
    //
    for (i = 0; i < imgfrmt_n; ++i) {
	if (imgfrmts[i].fourcc == fourcc) {
	    *format = imgfrmts[i];
	    Debug(3, "video/vaapi: use\t%c%c%c%c\t%d\n", imgfrmts[i].fourcc,
		imgfrmts[i].fourcc >> 8, imgfrmts[i].fourcc >> 16,
		imgfrmts[i].fourcc >> 24, imgfrmts[i].depth);
	    return 1;
	}
    }

    Fatal("video/vaapi: pixel format %d unsupported by VA-API\n", pix_fmt);
    // FIXME: no fatal error!

    return 0;
}

///
///	Configure VA-API for new video format.
///
///	@param decoder	VA-API decoder
///
static void VaapiSetup(VaapiDecoder * decoder,
    const AVCodecContext * video_ctx)
{
    int width;
    int height;
    VAImageFormat format[1];

    // create initial black surface and display
    VaapiBlackSurface(decoder);
    // cleanup last context
    VaapiCleanup(decoder);

    width = video_ctx->width;
    height = video_ctx->height;
#ifdef DEBUG
    // FIXME: remove this if
    if (decoder->Image->image_id != VA_INVALID_ID) {
	abort();			// should be done by VaapiCleanup()
    }
#endif
    // FIXME: PixFmt not set!
    //VaapiFindImageFormat(decoder, decoder->PixFmt, format);
    VaapiFindImageFormat(decoder, PIX_FMT_NV12, format);

    // FIXME: this image is only needed for software decoder and auto-crop
    if (decoder->GetPutImage
	&& vaCreateImage(VaDisplay, format, width, height,
	    decoder->Image) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't create image!\n"));
    }
    Debug(3,
	"video/vaapi: created image %dx%d with id 0x%08x and buffer id 0x%08x\n",
	width, height, decoder->Image->image_id, decoder->Image->buf);

    // FIXME: interlaced not valid here?
    decoder->Resolution =
	VideoResolutionGroup(width, height, decoder->Interlaced);
    VaapiCreateSurfaces(decoder, width, height);

#ifdef USE_GLX
    if (GlxEnabled) {
	// FIXME: destroy old context

	GlxSetupDecoder(decoder->InputWidth, decoder->InputHeight,
	    decoder->GlTextures);
	// FIXME: try two textures
	if (vaCreateSurfaceGLX(decoder->VaDisplay, GL_TEXTURE_2D,
		decoder->GlTextures[0], &decoder->GlxSurfaces[0])
	    != VA_STATUS_SUCCESS) {
	    Fatal(_("video/glx: can't create glx surfaces\n"));
	    // FIXME: no fatal here
	}
	/*
	   if (vaCreateSurfaceGLX(decoder->VaDisplay, GL_TEXTURE_2D,
	   decoder->GlTextures[1], &decoder->GlxSurfaces[1])
	   != VA_STATUS_SUCCESS) {
	   Fatal(_("video/glx: can't create glx surfaces\n"));
	   }
	 */
    }
#endif

    VaapiUpdateOutput(decoder);

    //
    //	update OSD associate
    //
#ifdef USE_GLX
    if (GlxEnabled) {
	return;				// GLX path handles OSD differently
    }
#endif
    VaapiAssociate(decoder);
}

///
///	Setup VA-API video processing (vpp) for new video format.
///
///	Queries and logs the supported vpp filters; filter creation is
///	still a stub (empty switch cases below).
///
///	@param decoder	VA-API decoder
///
static void VaapiSetupVideoProcessing(VaapiDecoder * decoder)
{
#if VA_CHECK_VERSION(0,33,99)
    VAProcFilterType filtertypes[VAProcFilterCount];
    unsigned filtertype_n;
    unsigned u;
    unsigned v;
    VAProcFilterCap denoise_caps[1];
    unsigned denoise_cap_n;
    VAProcFilterCapDeinterlacing deinterlacing_caps[VAProcDeinterlacingCount];
    unsigned deinterlacing_cap_n;
    VABufferID denoise_filter;
    VABufferID deint_filter;
    VABufferID sharpen_filter;
    VABufferID color_filter;
    VABufferID filters[VAProcFilterCount];
    unsigned filter_n;

    if (!VaapiVideoProcessing) {
	return;
    }
    //
    //	display and filter infos.
    //
    filtertype_n = VAProcFilterCount;	// API break this must be done
    vaQueryVideoProcFilters(VaDisplay, decoder->VaapiContext->context_id,
	filtertypes, &filtertype_n);
    for (u = 0; u < filtertype_n; ++u) {
	switch (filtertypes[u]) {
	    case VAProcFilterNoiseReduction:
		Info("video/vaapi: noise reduction supported\n");
		denoise_cap_n = 1;
		vaQueryVideoProcFilterCaps(VaDisplay,
		    decoder->VaapiContext->context_id,
		    VAProcFilterNoiseReduction, denoise_caps, &denoise_cap_n);
		if (denoise_cap_n) {
		    Info("video/vaapi: %.2f - %.2f ++ %.2f = %.2f\n",
			denoise_caps->range.min_value,
			denoise_caps->range.max_value,
			denoise_caps->range.step,
			denoise_caps->range.default_value);
		}
		break;
	    case VAProcFilterDeinterlacing:
		Info("video/vaapi: deinterlacing supported\n");
		deinterlacing_cap_n = VAProcDeinterlacingCount;
		vaQueryVideoProcFilterCaps(VaDisplay,
		    decoder->VaapiContext->context_id,
		    VAProcFilterDeinterlacing, deinterlacing_caps,
		    &deinterlacing_cap_n);
		for (v = 0; v < deinterlacing_cap_n; ++v) {
		    switch (deinterlacing_caps[v].type) {
			case VAProcDeinterlacingBob:
			    Info("video/vaapi: bob deinterlace supported\n");
			    break;
			case VAProcDeinterlacingWeave:
			    Info("video/vaapi: weave deinterlace supported\n");
			    break;
			case VAProcDeinterlacingMotionAdaptive:
			    Info("video/vaapi: motion adaptive deinterlace supported\n");
			    break;
			case VAProcDeinterlacingMotionCompensated:
			    Info("video/vaapi: motion compensated deinterlace supported\n");
			    break;
			default:
			    Info("video/vaapi: unsupported deinterlace #%02x\n",
				deinterlacing_caps[v].type);
			    break;
		    }
		}
		break;
	    case VAProcFilterSharpening:
		Info("video/vaapi: sharpening supported\n");
		break;
	    case VAProcFilterColorBalance:
		Info("video/vaapi: color balance supported\n");
		break;
	    default:
		Info("video/vaapi: unsupported filter #%02x\n",
		    filtertypes[u]);
		break;
	}
    }
    //
    //	create pipeline filters (stub: nothing is created yet)
    //
    filter_n = 0;
    filtertype_n = VAProcFilterCount;
    vaQueryVideoProcFilters(VaDisplay, decoder->VaapiContext->context_id,
	filtertypes, &filtertype_n);
    for (u = 0; u < filtertype_n; ++u) {
	switch (filtertypes[u]) {
	    case VAProcFilterNoiseReduction:
		break;
	    case VAProcFilterDeinterlacing:
		break;
	    case VAProcFilterSharpening:
		break;
	    case VAProcFilterColorBalance:
		break;
	    default:
		break;
	}
    }
    //
    //	query pipeline caps
    //
#endif
}

///
///	Get a free surface.  Called from ffmpeg.
///
///	@param decoder	VA-API decoder
///	@param video_ctx	ffmpeg video codec context
///
///	@returns the oldest free surface
///
static VASurfaceID VaapiGetSurface(VaapiDecoder * decoder,
    const AVCodecContext * video_ctx)
{
#ifdef FFMPEG_BUG1_WORKAROUND
    // get_format not called with valid informations.
    // rebuild config/context when the video size changed under us
    if (video_ctx->width != decoder->InputWidth
	|| video_ctx->height != decoder->InputHeight) {
	VAStatus status;

	decoder->InputWidth = video_ctx->width;
	decoder->InputHeight = video_ctx->height;
	decoder->InputAspect = video_ctx->sample_aspect_ratio;

	VaapiSetup(decoder, video_ctx);

	// create a configuration for the decode pipeline
	if ((status = vaCreateConfig(decoder->VaDisplay, decoder->Profile,
		    decoder->Entrypoint, NULL, 0,
		    &decoder->VaapiContext->config_id))) {
	    Error(_("video/vaapi: can't create config '%s'\n"),
		vaErrorStr(status));
	    // bind surfaces to context
	} else if ((status = vaCreateContext(decoder->VaDisplay,
		    decoder->VaapiContext->config_id, video_ctx->width,
		    video_ctx->height, VA_PROGRESSIVE, decoder->SurfacesFree,
		    decoder->SurfaceFreeN,
		    &decoder->VaapiContext->context_id))) {
	    Error(_("video/vaapi: can't create context '%s'\n"),
		vaErrorStr(status));
	}
	// FIXME: too late to switch to software rending on failures
	VaapiSetupVideoProcessing(decoder);
    }
#else
    (void)video_ctx;
#endif

    return VaapiGetSurface0(decoder);
}

///
///	Find VA-API profile.
///
///	Check if the requested profile is supported by VA-API.
///
///	@param profiles	a table of all supported profiles
///	@param n	number of supported profiles
///	@param profile	requested profile
///
///	@returns the profile if supported, -1 if unsupported.
///
static VAProfile VaapiFindProfile(const VAProfile * profiles, unsigned n,
    VAProfile profile)
{
    unsigned u;

    // linear search over the driver-supplied profile table
    for (u = 0; u < n; ++u) {
	if (profiles[u] == profile) {
	    return profile;
	}
    }
    return -1;
}

///
///	Find VA-API entry point.
///
///	Check if the requested entry point is supported by VA-API.
///
///	@param entrypoints	a table of all supported entrypoints
///	@param n	number of supported entrypoints
///	@param entrypoint	requested entrypoint
///
///	@returns the entry point if supported, -1 if unsupported.
///
static VAEntrypoint VaapiFindEntrypoint(const VAEntrypoint * entrypoints,
    unsigned n, VAEntrypoint entrypoint)
{
    unsigned u;

    // linear search over the driver-supplied entry-point table
    for (u = 0; u < n; ++u) {
	if (entrypoints[u] == entrypoint) {
	    return entrypoint;
	}
    }
    return -1;
}

///
///	Callback to negotiate the PixelFormat.
///
///	@param decoder	VA-API decoder
///	@param video_ctx	ffmpeg video codec context
///	@param fmt	is the list of formats which are supported by the codec,
///			it is terminated by -1 as 0 is a valid format, the
///			formats are ordered by quality.
///
///	@note + 2 surface for software deinterlace
///
static enum PixelFormat Vaapi_get_format(VaapiDecoder * decoder,
    AVCodecContext * video_ctx, const enum PixelFormat *fmt)
{
    const enum PixelFormat *fmt_idx;
    VAProfile profiles[vaMaxNumProfiles(VaDisplay)];
    int profile_n;
    VAEntrypoint entrypoints[vaMaxNumEntrypoints(VaDisplay)];
    int entrypoint_n;
    int p;
    int e;
    VAConfigAttrib attrib;

    // VideoHardwareDecoder == 1 means "hw decode everything but MPEG-2"
    if (!VideoHardwareDecoder || (video_ctx->codec_id == AV_CODEC_ID_MPEG2VIDEO
	    && VideoHardwareDecoder == 1)
	) {				// hardware disabled by config
	Debug(3, "codec: hardware acceleration disabled\n");
	goto slow_path;
    }

    p = -1;
    e = -1;

    //	prepare va-api profiles
    if (vaQueryConfigProfiles(VaDisplay, profiles, &profile_n)) {
	Error(_("codec: vaQueryConfigProfiles failed"));
	goto slow_path;
    }
    Debug(3, "codec: %d profiles\n", profile_n);

    // check profile: pick the VA-API profile matching the ffmpeg codec,
    // and record how many decode surfaces this codec will need
    switch (video_ctx->codec_id) {
	case AV_CODEC_ID_MPEG2VIDEO:
	    decoder->SurfacesNeeded =
		CODEC_SURFACES_MPEG2 + VIDEO_SURFACES_MAX + 2;
	    p = VaapiFindProfile(profiles, profile_n, VAProfileMPEG2Main);
	    break;
	case AV_CODEC_ID_MPEG4:
	case AV_CODEC_ID_H263:
	    decoder->SurfacesNeeded =
		CODEC_SURFACES_MPEG4 + VIDEO_SURFACES_MAX + 2;
	    p = VaapiFindProfile(profiles, profile_n,
		VAProfileMPEG4AdvancedSimple);
	    break;
	case AV_CODEC_ID_H264:
	    decoder->SurfacesNeeded =
		CODEC_SURFACES_H264 + VIDEO_SURFACES_MAX + 2;
	    // try more simple formats, fallback to better
	    if (video_ctx->profile == FF_PROFILE_H264_BASELINE) {
		p = VaapiFindProfile(profiles, profile_n,
		    VAProfileH264Baseline);
		if (p == -1) {
		    p = VaapiFindProfile(profiles, profile_n,
			VAProfileH264Main);
		}
	    } else if (video_ctx->profile == FF_PROFILE_H264_MAIN) {
		p = VaapiFindProfile(profiles, profile_n, VAProfileH264Main);
	    }
	    if (p == -1) {
		p = VaapiFindProfile(profiles, profile_n, VAProfileH264High);
	    }
	    break;
	case AV_CODEC_ID_WMV3:
	    decoder->SurfacesNeeded =
		CODEC_SURFACES_VC1 + VIDEO_SURFACES_MAX + 2;
	    p = VaapiFindProfile(profiles, profile_n, VAProfileVC1Main);
	    break;
	case AV_CODEC_ID_VC1:
	    decoder->SurfacesNeeded =
		CODEC_SURFACES_VC1 + VIDEO_SURFACES_MAX + 2;
	    p = VaapiFindProfile(profiles, profile_n, VAProfileVC1Advanced);
	    break;
	default:
	    goto slow_path;
    }
    if (p == -1) {
	Debug(3, "\tno profile found\n");
	goto slow_path;
    }
    Debug(3, "\tprofile %d\n", p);

    // prepare va-api entry points
    if (vaQueryConfigEntrypoints(VaDisplay, p, entrypoints, &entrypoint_n)) {
	Error(_("codec: vaQueryConfigEntrypoints failed"));
	goto slow_path;
    }
    Debug(3, "codec: %d entrypoints\n", entrypoint_n);
    //	look through formats
    for (fmt_idx = fmt; *fmt_idx != PIX_FMT_NONE; fmt_idx++) {
	Debug(3, "\t%#010x %s\n", *fmt_idx, av_get_pix_fmt_name(*fmt_idx));
	// check supported pixel format with entry point
	switch (*fmt_idx) {
	    case PIX_FMT_VAAPI_VLD:
		e = VaapiFindEntrypoint(entrypoints, entrypoint_n,
		    VAEntrypointVLD);
		break;
	    case PIX_FMT_VAAPI_MOCO:
	    case PIX_FMT_VAAPI_IDCT:
		Debug(3, "codec: this VA-API pixel format is not supported\n");
		// fallthrough: treated the same as any unusable format
	    default:
		continue;
	}
	if (e != -1) {
	    Debug(3, "\tentry point %d\n", e);
	    break;
	}
    }
    if (e == -1) {
	Warning(_("codec: unsupported: slow path\n"));
	goto slow_path;
    }
    //
    //	prepare decoder config
    //
    memset(&attrib, 0, sizeof(attrib));
    attrib.type = VAConfigAttribRTFormat;
    if (vaGetConfigAttributes(decoder->VaDisplay, p, e, &attrib, 1)) {
	Error(_("codec: can't get attributes"));
	goto slow_path;
    }
    if (attrib.value & VA_RT_FORMAT_YUV420) {
	Info(_("codec: YUV 420 supported\n"));
    }
    if (attrib.value & VA_RT_FORMAT_YUV422) {
	Info(_("codec: YUV 422 supported\n"));
    }
    if (attrib.value & VA_RT_FORMAT_YUV444) {
	Info(_("codec: YUV 444 supported\n"));
    }
    // YUV 420 is the only render-target format this module can use
    if (!(attrib.value & VA_RT_FORMAT_YUV420)) {
	Warning(_("codec: YUV 420 not supported\n"));
	goto slow_path;
    }

    decoder->Profile = p;
    decoder->Entrypoint = e;
    decoder->PixFmt = *fmt_idx;
    decoder->InputWidth = 0;
    decoder->InputHeight = 0;

#ifndef FFMPEG_BUG1_WORKAROUND
    if (video_ctx->width && video_ctx->height) {
	VAStatus status;

	decoder->InputWidth = video_ctx->width;
	decoder->InputHeight = video_ctx->height;
	decoder->InputAspect = video_ctx->sample_aspect_ratio;
	VaapiSetup(decoder, video_ctx);

	// FIXME: move the following into VaapiSetup

	// create a configuration for the decode pipeline
	if ((status = vaCreateConfig(decoder->VaDisplay, p, e, &attrib, 1,
		    &decoder->VaapiContext->config_id))) {
	    Error(_("codec: can't create config '%s'\n"), vaErrorStr(status));
	    goto slow_path;
	}
	// bind surfaces to context
	if ((status = vaCreateContext(decoder->VaDisplay,
		    decoder->VaapiContext->config_id, video_ctx->width,
		    video_ctx->height, VA_PROGRESSIVE, decoder->SurfacesFree,
		    decoder->SurfaceFreeN,
		    &decoder->VaapiContext->context_id))) {
	    Error(_("codec: can't create context '%s'\n"), vaErrorStr(status));
	    goto slow_path;
	}
	VaapiSetupVideoProcessing(decoder);
    }
#endif

    Debug(3, "\t%#010x %s\n", fmt_idx[0], av_get_pix_fmt_name(fmt_idx[0]));
    return *fmt_idx;

  slow_path:
    // no accelerated format found: reset state and let ffmpeg pick a
    // software pixel format
    decoder->Profile = VA_INVALID_ID;
    decoder->Entrypoint = VA_INVALID_ID;
    decoder->VaapiContext->config_id = VA_INVALID_ID;
    decoder->SurfacesNeeded = VIDEO_SURFACES_MAX + 2;
    decoder->PixFmt =
	PIX_FMT_NONE;
    decoder->InputWidth = 0;
    decoder->InputHeight = 0;
    video_ctx->hwaccel_context = NULL;
    return avcodec_default_get_format(video_ctx, fmt);
}

///
///	Draw surface of the VA-API decoder with x11.
///
///	vaPutSurface with intel backend does sync on v-sync.
///
///	@param decoder	VA-API decoder
///	@param surface	VA-API surface id
///	@param interlaced	flag interlaced source
///	@param top_field_first	flag top_field_first for interlaced source
///	@param field	interlaced draw: 0 first field, 1 second field
///
static void VaapiPutSurfaceX11(VaapiDecoder * decoder, VASurfaceID surface,
    int interlaced, int top_field_first, int field)
{
    unsigned type;
    VAStatus status;
    uint32_t s;
    uint32_t e;

    // deinterlace: select which field to display; hardware deinterlacers
    // (< SoftBob) other than weave get a single field per call
    if (interlaced
	&& VideoDeinterlace[decoder->Resolution] < VideoDeinterlaceSoftBob
	&& VideoDeinterlace[decoder->Resolution] != VideoDeinterlaceWeave) {
	if (top_field_first) {
	    if (field) {
		type = VA_BOTTOM_FIELD;
	    } else {
		type = VA_TOP_FIELD;
	    }
	} else {
	    if (field) {
		type = VA_TOP_FIELD;
	    } else {
		type = VA_BOTTOM_FIELD;
	    }
	}
    } else {
	type = VA_FRAME_PICTURE;
    }

    s = GetMsTicks();
    xcb_flush(Connection);
    if ((status = vaPutSurface(decoder->VaDisplay, surface, decoder->Window,
		// decoder src
		decoder->CropX, decoder->CropY, decoder->CropWidth,
		decoder->CropHeight,
		// video dst
		decoder->OutputX, decoder->OutputY, decoder->OutputWidth,
		decoder->OutputHeight, NULL, 0,
		type | decoder->SurfaceFlagsTable[decoder->Resolution]))
	!= VA_STATUS_SUCCESS) {
	// switching video kills VdpPresentationQueueBlockUntilSurfaceIdle
	Error(_("video/vaapi: vaPutSurface failed %d\n"), status);
    }
    // disabled: extra sync kept for debugging only
    if (0 && vaSyncSurface(decoder->VaDisplay, surface) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: vaSyncSurface failed\n"));
    }
    e = GetMsTicks();

    // vaPutSurface taking > 2s is treated as a GPU hang
    if (e - s > 2000) {
	Error(_("video/vaapi: gpu hung %dms %d\n"), e - s,
	    decoder->FrameCounter);
	fprintf(stderr, _("video/vaapi: gpu hung %dms %d\n"), e - s,
	    decoder->FrameCounter);
    }

    if (0) {
	// check if surface is really ready
	// VDPAU backend, says always ready
	VASurfaceStatus status;

	if (vaQuerySurfaceStatus(decoder->VaDisplay, surface, &status)
	    != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: vaQuerySurface failed\n"));
	    status = VASurfaceReady;
	}
	if (status != VASurfaceReady) {
	    Warning(_
		("video/vaapi: surface %#010x not ready: still displayed %d\n"),
		surface, status);
	    return;
	}
    }

    if (0) {
	int i;

	// look how the status changes the next 40ms
	for (i = 0; i < 40; ++i) {
	    VASurfaceStatus status;

	    if (vaQuerySurfaceStatus(VaDisplay, surface, &status)
		!= VA_STATUS_SUCCESS) {
		Error(_("video/vaapi: vaQuerySurface failed\n"));
	    }
	    Debug(3, "video/vaapi: %2d %d\n", i, status);
	    usleep(1 * 1000);
	}
    }
    // NOTE(review): unconditional 1ms sleep after every put — presumably a
    // driver-throttling workaround; verify against upstream history
    usleep(1 * 1000);
}

#ifdef USE_GLX

///
///	Draw surface of the VA-API decoder with glx.
///
///	@param decoder	VA-API decoder
///	@param surface	VA-API surface id
///	@param interlaced	flag interlaced source
///	@param top_field_first	flag top_field_first for interlaced source
///	@param field	interlaced draw: 0 first field, 1 second field
///
static void VaapiPutSurfaceGLX(VaapiDecoder * decoder, VASurfaceID surface,
    int interlaced, int top_field_first, int field)
{
    unsigned type;

    //uint32_t start;
    //uint32_t copy;
    //uint32_t end;

    // deinterlace: same field-selection logic as the x11 path
    if (interlaced
	&& VideoDeinterlace[decoder->Resolution] < VideoDeinterlaceSoftBob
	&& VideoDeinterlace[decoder->Resolution] != VideoDeinterlaceWeave) {
	if (top_field_first) {
	    if (field) {
		type = VA_BOTTOM_FIELD;
	    } else {
		type = VA_TOP_FIELD;
	    }
	} else {
	    if (field) {
		type = VA_TOP_FIELD;
	    } else {
		type = VA_BOTTOM_FIELD;
	    }
	}
    } else {
	type = VA_FRAME_PICTURE;
    }
    //start = GetMsTicks();
    // copy the decoded surface into the GLX texture
    if (vaCopySurfaceGLX(decoder->VaDisplay, decoder->GlxSurfaces[0], surface,
	    type | decoder->SurfaceFlagsTable[decoder->Resolution])
	!= VA_STATUS_SUCCESS) {
	Error(_("video/glx: vaCopySurfaceGLX failed\n"));
	return;
    }
    //copy = GetMsTicks();
    // hardware surfaces are always busy
    // FIXME: CropX, ...
    GlxRenderTexture(decoder->GlTextures[0], decoder->OutputX,
	decoder->OutputY, decoder->OutputWidth, decoder->OutputHeight);
    //end = GetMsTicks();

    //Debug(3, "video/vaapi/glx: %d copy %d render\n", copy - start, end - copy);
}

#endif

#ifdef USE_AUTOCROP

///
///	VA-API auto-crop support.
///
///	Downloads the last queued surface, runs black-bar detection on it
///	and switches the crop/output rectangles between 4:3, 14:9 and 16:9.
///
///	@param decoder	VA-API hw decoder
///
static void VaapiAutoCrop(VaapiDecoder * decoder)
{
    VASurfaceID surface;
    uint32_t width;
    uint32_t height;
    void *va_image_data;
    void *data[3];
    uint32_t pitches[3];
    int crop14;
    int crop16;
    int next_state;
    int i;

    width = decoder->InputWidth;
    height = decoder->InputHeight;

  again:
    // lazily create the download image when vaDeriveImage isn't usable
    if (decoder->GetPutImage && decoder->Image->image_id == VA_INVALID_ID) {
	VAImageFormat format[1];

	Debug(3, "video/vaapi: download image not available\n");

	// FIXME: PixFmt not set!
	//VaapiFindImageFormat(decoder, decoder->PixFmt, format);
	VaapiFindImageFormat(decoder, PIX_FMT_NV12, format);
	//VaapiFindImageFormat(decoder, PIX_FMT_YUV420P, format);
	if (vaCreateImage(VaDisplay, format, width, height,
		decoder->Image) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't create image!\n"));
	    return;
	}
    }
    // no problem to go back, we just wrote it
    // FIXME: we can pass the surface through.
    surface =
	decoder->SurfacesRb[(decoder->SurfaceWrite + VIDEO_SURFACES_MAX -
	    1) % VIDEO_SURFACES_MAX];

    //	Copy data from frame to image
    if (!decoder->GetPutImage
	&& vaDeriveImage(decoder->VaDisplay, surface,
	    decoder->Image) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: vaDeriveImage failed\n"));
	// derive failed: fall back to vaGetImage permanently and retry
	decoder->GetPutImage = 1;
	goto again;
    }
    if (decoder->GetPutImage
	&& (i = vaGetImage(decoder->VaDisplay, surface, 0, 0,
		decoder->InputWidth, decoder->InputHeight,
		decoder->Image->image_id)) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't get auto-crop image %d\n"), i);
	printf(_("video/vaapi: can't get auto-crop image %d\n"), i);
	return;
    }
    if (vaMapBuffer(VaDisplay, decoder->Image->buf,
	    &va_image_data) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't map auto-crop image!\n"));
	return;
    }
    // convert vaapi to our frame format
    for (i = 0; (unsigned)i < decoder->Image->num_planes; ++i) {
	data[i] = va_image_data + decoder->Image->offsets[i];
	pitches[i] = decoder->Image->pitches[i];
    }

    AutoCropDetect(decoder->AutoCrop, width, height, data, pitches);

    if (vaUnmapBuffer(VaDisplay, decoder->Image->buf) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't unmap auto-crop image!\n"));
    }
    if (!decoder->GetPutImage) {
	// derived images must be destroyed again after use
	if (vaDestroyImage(VaDisplay,
		decoder->Image->image_id) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't destroy image!\n"));
	}
	decoder->Image->image_id = VA_INVALID_ID;
    }
    // FIXME: this a copy of vdpau, combine the two same things

    // ignore black frames
    if (decoder->AutoCrop->Y1 >= decoder->AutoCrop->Y2) {
	return;
    }

    // expected top/bottom bar height for 14:9 and 16:9 content in 4:3
    crop14 =
	(decoder->InputWidth * decoder->InputAspect.num * 9)
	/ (decoder->InputAspect.den * 14);
    crop14 = (decoder->InputHeight - crop14) / 2;
    crop16 =
	(decoder->InputWidth * decoder->InputAspect.num * 9)
	/ (decoder->InputAspect.den * 16);
    crop16 = (decoder->InputHeight - crop16) / 2;

    if (decoder->AutoCrop->Y1 >= crop16 - AutoCropTolerance
	&& decoder->InputHeight - decoder->AutoCrop->Y2 >=
	crop16 - AutoCropTolerance) {
	next_state = 16;
    } else if (decoder->AutoCrop->Y1 >= crop14 - AutoCropTolerance
	&& decoder->InputHeight - decoder->AutoCrop->Y2 >=
	crop14 - AutoCropTolerance) {
	next_state = 14;
    } else {
	next_state = 0;
    }

    if (decoder->AutoCrop->State == next_state) {
	return;
    }

    Debug(3, "video: crop aspect %d:%d %d/%d %+d%+d\n",
	decoder->InputAspect.num, decoder->InputAspect.den, crop14, crop16,
	decoder->AutoCrop->Y1, decoder->InputHeight - decoder->AutoCrop->Y2);

    Debug(3, "video: crop aspect %d -> %d\n", decoder->AutoCrop->State,
	next_state);

    // debounce: require the new state to persist for AutoCropDelay frames
    // (half the delay when leaving a cropped state)
    switch (decoder->AutoCrop->State) {
	case 16:
	case 14:
	    if (decoder->AutoCrop->Count++ < AutoCropDelay / 2) {
		return;
	    }
	    break;
	case 0:
	    if (decoder->AutoCrop->Count++ < AutoCropDelay) {
		return;
	    }
	    break;
    }

    decoder->AutoCrop->State = next_state;
    if (next_state) {
	decoder->CropX = VideoCutLeftRight[decoder->Resolution];
	decoder->CropY =
	    (next_state ==
	    16 ? crop16 : crop14) + VideoCutTopBottom[decoder->Resolution];
	decoder->CropWidth = decoder->InputWidth - decoder->CropX * 2;
	decoder->CropHeight = decoder->InputHeight - decoder->CropY * 2;

	// FIXME: this overwrites user choosen output position
	// FIXME: resize kills the auto crop values
	// FIXME: support other 4:3 zoom modes
	decoder->OutputX = decoder->VideoX;
	decoder->OutputY = decoder->VideoY;
	decoder->OutputWidth = (decoder->VideoHeight * next_state) / 9;
	decoder->OutputHeight = (decoder->VideoWidth * 9) / next_state;
	if (decoder->OutputWidth > decoder->VideoWidth) {
	    decoder->OutputWidth = decoder->VideoWidth;
	    decoder->OutputY =
		(decoder->VideoHeight - decoder->OutputHeight) / 2;
	} else if (decoder->OutputHeight > decoder->VideoHeight) {
	    decoder->OutputHeight = decoder->VideoHeight;
	    decoder->OutputX =
		(decoder->VideoWidth - decoder->OutputWidth) / 2;
	}
	Debug(3, "video: aspect output %dx%d %dx%d%+d%+d\n",
	    decoder->InputWidth, decoder->InputHeight,
	    decoder->OutputWidth, decoder->OutputHeight,
	    decoder->OutputX, decoder->OutputY);
    } else {
	// sets AutoCrop->Count
	VaapiUpdateOutput(decoder);
    }
    decoder->AutoCrop->Count = 0;

    //
    //	update OSD associate
    //
    VaapiDeassociate(decoder);
    VaapiAssociate(decoder);
}

///
///	VA-API check if auto-crop todo.
///
///	@param decoder	VA-API hw decoder
///
///	@note a copy of VdpauCheckAutoCrop
///	@note auto-crop only supported with normal 4:3 display mode
///
static void VaapiCheckAutoCrop(VaapiDecoder * decoder)
{
    // reduce load, check only n frames
    if (Video4to3ZoomMode == VideoNormal && AutoCropInterval
	&& !(decoder->FrameCounter % AutoCropInterval)) {
	AVRational input_aspect_ratio;
	AVRational tmp_ratio;

	av_reduce(&input_aspect_ratio.num, &input_aspect_ratio.den,
	    decoder->InputWidth * decoder->InputAspect.num,
	    decoder->InputHeight * decoder->InputAspect.den, 1024 * 1024);

	tmp_ratio.num = 4;
	tmp_ratio.den = 3;
	// only 4:3 with 16:9/14:9 inside supported
	if (!av_cmp_q(input_aspect_ratio, tmp_ratio)) {
	    VaapiAutoCrop(decoder);
	} else {
	    decoder->AutoCrop->Count = 0;
	    decoder->AutoCrop->State = 0;
	}
    }
}

///
///	VA-API reset auto-crop.
///
static void VaapiResetAutoCrop(void)
{
    int i;

    // clear auto-crop state of every active decoder
    for (i = 0; i < VaapiDecoderN; ++i) {
	VaapiDecoders[i]->AutoCrop->State = 0;
	VaapiDecoders[i]->AutoCrop->Count = 0;
    }
}

#endif

///
///	Queue output surface.
///
///	@param decoder	VA-API decoder
///	@param surface	output surface
///	@param softdec	software decoder
///
///	@note we can't mix software and hardware decoder surfaces
///
static void VaapiQueueSurface(VaapiDecoder * decoder, VASurfaceID surface,
    int softdec)
{
    VASurfaceID old;

    ++decoder->FrameCounter;

    if (1) {				// can't wait for output queue empty
	if (atomic_read(&decoder->SurfacesFilled) >= VIDEO_SURFACES_MAX) {
	    // ring buffer full: drop this frame rather than block
	    ++decoder->FramesDropped;
	    Warning(_("video: output buffer full, dropping frame (%d/%d)\n"),
		decoder->FramesDropped, decoder->FrameCounter);
	    if (!(decoder->FramesDisplayed % 300)) {
		VaapiPrintFrames(decoder);
	    }
	    if (softdec) {		// software surfaces only
		VaapiReleaseSurface(decoder, surface);
	    }
	    return;
	}
#if 0
    } else {				// wait for output queue empty
	while (atomic_read(&decoder->SurfacesFilled) >= VIDEO_SURFACES_MAX) {
	    VideoDisplayHandler();
	}
#endif
    }

    //
    //	Check and release, old surface
    //
    if ((old = decoder->SurfacesRb[decoder->SurfaceWrite])
	!= VA_INVALID_ID) {
#if 0
	if (vaSyncSurface(decoder->VaDisplay, old) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: vaSyncSurface failed\n"));
	}
	VASurfaceStatus status;

	if (vaQuerySurfaceStatus(decoder->VaDisplay, old, &status)
	    != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: vaQuerySurface failed\n"));
	    status = VASurfaceReady;
	}
	if (status != VASurfaceReady) {
	    Warning(_
		("video/vaapi: surface %#010x not ready: still displayed %d\n"),
		old, status);
	    if (0 && vaSyncSurface(decoder->VaDisplay, old)
		!= VA_STATUS_SUCCESS) {
		Error(_("video/vaapi: vaSyncSurface failed\n"));
	    }
	}
#endif
	// now we can release the surface
	if (softdec) {			// software surfaces only
	    VaapiReleaseSurface(decoder, old);
	}
    }
#if 0
    // FIXME: intel seems to forget this, nvidia GT 210 has speed problems here
    if (VaapiBuggyIntel && VaOsdSubpicture != VA_INVALID_ID) {
	// FIXME: associate only if osd is displayed

	//
	//	associate the OSD with surface
	//
	if (VaapiUnscaledOsd) {
	    if (vaAssociateSubpicture(VaDisplay, VaOsdSubpicture, &surface, 1,
		    0, 0, VaOsdImage.width, VaOsdImage.height, 0, 0,
		    VideoWindowWidth, VideoWindowHeight,
		    VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD)
		!= VA_STATUS_SUCCESS) {
		Error(_("video/vaapi: can't associate subpicture\n"));
	    }
	} else {
	    // FIXME: auto-crop wrong position
	    if (vaAssociateSubpicture(VaDisplay, VaOsdSubpicture, &surface, 1,
		    0, 0, VaOsdImage.width, VaOsdImage.height, 0, 0,
		    decoder->InputWidth, decoder->InputHeight, 0)
		!= VA_STATUS_SUCCESS) {
		Error(_("video/vaapi: can't associate subpicture\n"));
	    }
	}
    }
#endif

    // publish the new surface in the ring buffer
    decoder->SurfacesRb[decoder->SurfaceWrite] = surface;
    decoder->SurfaceWrite = (decoder->SurfaceWrite + 1) % VIDEO_SURFACES_MAX;
    atomic_inc(&decoder->SurfacesFilled);

    Debug(4, "video/vaapi: yy video surface %#010x ready\n", surface);
}

///
///	Create and display a black empty surface.
///
///	@param decoder	VA-API decoder
///
static void VaapiBlackSurface(VaapiDecoder * decoder)
{
    VAStatus status;

#ifdef DEBUG
    uint32_t start;
#endif
    uint32_t sync;
    uint32_t put1;

#ifdef USE_GLX
    if (GlxEnabled) {			// already done
	return;
    }
#endif

    // wait until we have osd subpicture
    if (VaOsdSubpicture == VA_INVALID_ID) {
	Warning(_("video/vaapi: no osd subpicture yet\n"));
	return;
    }

    if (decoder->BlackSurface == VA_INVALID_ID) {
	uint8_t *va_image_data;
	unsigned u;

	status =
	    vaCreateSurfaces(decoder->VaDisplay, VA_RT_FORMAT_YUV420,
	    VideoWindowWidth, VideoWindowHeight, &decoder->BlackSurface, 1,
	    NULL, 0);
	if (status != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't create a surface: %s\n"),
		vaErrorStr(status));
	    return;
	}
	// full sized surface, no difference unscaled/scaled osd
	status =
	    vaAssociateSubpicture(decoder->VaDisplay, VaOsdSubpicture,
	    &decoder->BlackSurface, 1, 0, 0, VaOsdImage.width,
	    VaOsdImage.height, 0, 0, VideoWindowWidth, VideoWindowHeight, 0);
	if (status != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't associate subpicture: %s\n"),
		vaErrorStr(status));
	}
	Debug(3, "video/vaapi: associate %08x\n", decoder->BlackSurface);

	if (decoder->Image->image_id == VA_INVALID_ID) {
	    VAImageFormat format[1];

	    VaapiFindImageFormat(decoder, PIX_FMT_NV12, format);
	    status =
		vaCreateImage(VaDisplay, format, VideoWindowWidth,
		VideoWindowHeight, decoder->Image);
	    if (status != VA_STATUS_SUCCESS) {
		Error(_("video/vaapi: can't create image: %s\n"),
		    vaErrorStr(status));
		return;
	    }
	}

	status =
	    vaMapBuffer(VaDisplay, decoder->Image->buf,
	    (void **)&va_image_data);
	if (status != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't map the image: %s\n"),
		vaErrorStr(status));
	    return;
	}
	// fill the image with black: Y=0x00, U/V=0x80 (NV12 interleaved UV)
	for (u = 0; u < decoder->Image->data_size; ++u) {
	    if (u < decoder->Image->offsets[1]) {
		va_image_data[u] = 0x00;	// Y
	    } else if (u % 2 == 0) {
		va_image_data[u] = 0x80;	// U
	    } else {
#ifdef DEBUG
		// make black surface visible
		va_image_data[u] = 0xFF;	// V
#else
		va_image_data[u] = 0x80;	// V
#endif
	    }
	}

	if (vaUnmapBuffer(VaDisplay, decoder->Image->buf)
	    != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't unmap the image!\n"));
	}

	if (decoder->GetPutImage) {
	    status =
		vaPutImage(VaDisplay, decoder->BlackSurface,
		decoder->Image->image_id, 0, 0, VideoWindowWidth,
		VideoWindowHeight, 0, 0, VideoWindowWidth, VideoWindowHeight);
	    if (status != VA_STATUS_SUCCESS) {
		Error(_("video/vaapi: can't put image!\n"));
	    }
	} else {
	    // FIXME: PutImage isn't always supported
	    Debug(3,
		"video/vaapi: put image not supported, alternative path not written\n");
	}
#ifdef DEBUG
	start = GetMsTicks();
#endif
	if (vaSyncSurface(decoder->VaDisplay,
		decoder->BlackSurface) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: vaSyncSurface failed\n"));
	}
    } else {
#ifdef DEBUG
	start = GetMsTicks();
#endif
    }

    Debug(4, "video/vaapi: yy black video surface %#010x displayed\n",
	decoder->BlackSurface);
    sync = GetMsTicks();
    xcb_flush(Connection);
    if ((status = vaPutSurface(decoder->VaDisplay, decoder->BlackSurface,
		decoder->Window,
		// decoder src
		decoder->OutputX, decoder->OutputY, decoder->OutputWidth,
		decoder->OutputHeight,
		// video dst
		decoder->OutputX, decoder->OutputY, decoder->OutputWidth,
		decoder->OutputHeight, NULL, 0, VA_FRAME_PICTURE))
	!= VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: vaPutSurface failed %d\n"), status);
    }
    clock_gettime(CLOCK_MONOTONIC, &decoder->FrameTime);

    put1 = GetMsTicks();
    // vaPutSurface taking > 2s is treated as a GPU hang
    if (put1 - sync > 2000) {
	Error(_("video/vaapi: gpu hung %dms %d\n"), put1 - sync,
	    decoder->FrameCounter);
	fprintf(stderr, _("video/vaapi: gpu hung %dms %d\n"), put1 - sync,
	    decoder->FrameCounter);
    }
    Debug(4, "video/vaapi: sync %2u put1 %2u\n", sync - start, put1 - sync);

    if (0 && vaSyncSurface(decoder->VaDisplay,
	    decoder->BlackSurface) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: vaSyncSurface failed\n"));
    }

    usleep(1 * 1000);
}

#define noUSE_VECTOR			///< use gcc vector extension

#ifdef USE_VECTOR

typedef char v16qi __attribute__ ((vector_size(16)));
typedef char v8qi __attribute__ ((vector_size(8)));
typedef int16_t v4hi __attribute__ ((vector_size(4)));
typedef int16_t v8hi __attribute__ ((vector_size(8)));

///
///	ELA Edge-based Line Averaging
///	Low-Complexity Interpolation Method
///
///	abcdefg	abcdefg	abcdefg	abcdefg	abcdefg
///	   x	   x	   x	   x	   x
///	hijklmn	hijklmn	hijklmn	hijklmn	hijklmn
///
///	NOTE(review): experimental SSE path (disabled by noUSE_VECTOR).
///	The edge scores (score_l/score_h) are computed but never used to
///	select a prediction — only the plain d/k average is stored.
///	`zero ^= zero;` also reads an uninitialized vector; this path is
///	unfinished.
///
static void FilterLineSpatial(uint8_t * dst, const uint8_t * cur, int width,
    int above, int below, int next)
{
    int x;

    // 8/16 128bit xmm register
    for (x = 0; x < width; x += 8) {
	v8qi c;
	v8qi d;
	v8qi e;
	v8qi j;
	v8qi k;
	v8qi l;
	v8qi t1;
	v8qi t2;
	v8qi pred;
	v8qi score_l;
	v8qi score_h;
	v8qi t_l;
	v8qi t_h;
	v8qi zero;

	// ignore bound violation
	d = *(v8qi *) & cur[above + x];
	k = *(v8qi *) & cur[below + x];
	pred = __builtin_ia32_pavgb(d, k);

	// score = ABS(c - j) + ABS(d - k) + ABS(e - l);
	c = *(v8qi *) & cur[above + x - 1 * next];
	e = *(v8qi *) & cur[above + x + 1 * next];
	j = *(v8qi *) & cur[below + x - 1 * next];
	l = *(v8qi *) & cur[below + x + 1 * next];

	t1 = __builtin_ia32_psubusb(c, j);
	t2 = __builtin_ia32_psubusb(j, c);
	t1 = __builtin_ia32_pmaxub(t1, t2);
	zero ^= zero;
	score_l = __builtin_ia32_punpcklbw(t1, zero);
	score_h = __builtin_ia32_punpckhbw(t1, zero);

	t1 = __builtin_ia32_psubusb(d, k);
	t2 = __builtin_ia32_psubusb(k, d);
	t1 = __builtin_ia32_pmaxub(t1, t2);
	t_l = __builtin_ia32_punpcklbw(t1, zero);
	t_h = __builtin_ia32_punpckhbw(t1, zero);
	score_l = __builtin_ia32_paddw(score_l, t_l);
	score_h = __builtin_ia32_paddw(score_h, t_h);

	t1 = __builtin_ia32_psubusb(e, l);
	t2 = __builtin_ia32_psubusb(l, e);
	t1 = __builtin_ia32_pmaxub(t1, t2);
	t_l = __builtin_ia32_punpcklbw(t1, zero);
	t_h = __builtin_ia32_punpckhbw(t1, zero);
	score_l = __builtin_ia32_paddw(score_l, t_l);
	score_h = __builtin_ia32_paddw(score_h, t_h);

	*(v8qi *) & dst[x] = pred;
    }
}

#else

/// Return the absolute value of an integer.
#define ABS(i) ((i) >= 0 ? (i) : (-(i)))

///
///	ELA Edge-based Line Averaging
///	Low-Complexity Interpolation Method
///
///	abcdefg	abcdefg	abcdefg	abcdefg	abcdefg
///	   x	   x	   x	   x	   x
///	hijklmn	hijklmn	hijklmn	hijklmn	hijklmn
///
///	Interpolates dst[x] from the lines above/below, choosing the
///	direction (0, ±1, ±2 pixels) with the lowest edge score.
///
static void FilterLineSpatial(uint8_t * dst, const uint8_t * cur, int width,
    int above, int below, int next)
{
    int a, b, c, d, e, f, g, h, i, j, k, l, m, n;
    int spatial_pred;
    int spatial_score;
    int score;
    int x;

    for (x = 0; x < width; ++x) {
	a = cur[above + x - 3 * next];	// ignore bound violation
	b = cur[above + x - 2 * next];
	c = cur[above + x - 1 * next];
	d = cur[above + x + 0 * next];
	e = cur[above + x + 1 * next];
	f = cur[above + x + 2 * next];
	g = cur[above + x + 3 * next];

	h = cur[below + x - 3 * next];
	i = cur[below + x - 2 * next];
	j = cur[below + x - 1 * next];
	k = cur[below + x + 0 * next];
	l = cur[below + x + 1 * next];
	m = cur[below + x + 2 * next];
	n = cur[below + x + 3 * next];

	spatial_pred = (d + k) / 2;	// 0 pixel
	spatial_score = ABS(c - j) + ABS(d - k) + ABS(e - l);

	score = ABS(b - k) + ABS(c - l) + ABS(d - m);
	if (score < spatial_score) {
	    spatial_pred = (c + l) / 2;	// 1 pixel
	    spatial_score = score;
	    score = ABS(a - l) + ABS(b - m) + ABS(c - n);
	    if (score < spatial_score) {
		spatial_pred = (b + m) / 2;	// 2 pixel
		spatial_score = score;
	    }
	}
	score = ABS(d - i) + ABS(e - j) + ABS(f - k);
	if (score < spatial_score) {
	    spatial_pred = (e + j) / 2;	// -1 pixel
	    spatial_score = score;
	    score = ABS(e - h) + ABS(f - i) + ABS(g - j);
	    if (score < spatial_score) {
		spatial_pred = (f + i) / 2;	// -2 pixel
		spatial_score = score;
	    }
	}
	dst[x + 0] = spatial_pred;
    }
}

#endif

///
///	Vaapi spatial deinterlace.
///
///	Splits one interlaced source image into two progressive output
///	images; the missing lines are interpolated with FilterLineSpatial.
///
///	@note FIXME: use common software deinterlace functions.
///
static void VaapiSpatial(VaapiDecoder * decoder, VAImage * src,
    VAImage * dst1, VAImage * dst2)
{
#ifdef DEBUG
    uint32_t tick1;
    uint32_t tick2;
    uint32_t tick3;
    uint32_t tick4;
    uint32_t tick5;
    uint32_t tick6;
    uint32_t tick7;
    uint32_t tick8;
#endif
    void *src_base;
    void *dst1_base;
    void *dst2_base;
    unsigned y;
    unsigned p;
    uint8_t *tmp;
    int pitch;
    int width;

#ifdef DEBUG
    tick1 = GetMsTicks();
#endif
    if (vaMapBuffer(decoder->VaDisplay, src->buf,
	    &src_base) != VA_STATUS_SUCCESS) {
	Fatal("video/vaapi: can't map the image!\n");
    }
#ifdef DEBUG
    tick2 = GetMsTicks();
#endif
    if (vaMapBuffer(decoder->VaDisplay, dst1->buf,
	    &dst1_base) != VA_STATUS_SUCCESS) {
	Fatal("video/vaapi: can't map the image!\n");
    }
#ifdef DEBUG
    tick3 = GetMsTicks();
#endif
    if (vaMapBuffer(decoder->VaDisplay, dst2->buf,
	    &dst2_base) != VA_STATUS_SUCCESS) {
	Fatal("video/vaapi: can't map the image!\n");
    }
#ifdef DEBUG
    tick4 = GetMsTicks();
#endif

    if (0) {				// test all updated
	memset(dst1_base, 0x00, dst1->data_size);
	memset(dst2_base, 0xFF, dst2->data_size);
    }
    // use tmp copy FIXME: only for intel needed
    // NOTE(review): malloc result is not checked before memcpy
    tmp = malloc(src->data_size);
    memcpy(tmp, src_base, src->data_size);

    if (src->num_planes == 2) {		// NV12
	pitch = src->pitches[0];
	width = src->width;
	for (y = 0; y < (unsigned)src->height; y++) {	// Y
	    const uint8_t *cur;

	    cur = tmp + src->offsets[0] + y * pitch;
	    if (y & 1) {
		// copy to 2nd
		memcpy(dst2_base + src->offsets[0] + y * pitch, cur, width);
		// create 1st
		FilterLineSpatial(dst1_base + src->offsets[0] + y * pitch,
		    cur, width, y ? -pitch : pitch,
		    y + 1 < (unsigned)src->height ?
		    pitch : -pitch, 1);
	    } else {
		// copy to 1st
		memcpy(dst1_base + src->offsets[0] + y * pitch, cur, width);
		// create 2nd
		FilterLineSpatial(dst2_base + src->offsets[0] + y * pitch,
		    cur, width, y ? -pitch : pitch,
		    y + 1 < (unsigned)src->height ? pitch : -pitch, 1);
	    }
	}
	// chroma: either copied straight through or deinterlaced too
	if (VideoSkipChromaDeinterlace[decoder->Resolution]) {
	    for (y = 0; y < (unsigned)src->height / 2; y++) {	// UV
		const uint8_t *cur;

		cur = tmp + src->offsets[1] + y * pitch;
		// copy to 1st
		memcpy(dst1_base + src->offsets[1] + y * pitch, cur, width);
		// copy to 2nd
		memcpy(dst2_base + src->offsets[1] + y * pitch, cur, width);
	    }
	} else {
	    for (y = 0; y < (unsigned)src->height / 2; y++) {	// UV
		const uint8_t *cur;

		cur = tmp + src->offsets[1] + y * pitch;
		if (y & 1) {
		    // copy to 2nd
		    memcpy(dst2_base + src->offsets[1] + y * pitch, cur,
			width);
		    // create 1st (next=2: NV12 interleaved U/V samples)
		    FilterLineSpatial(dst1_base + src->offsets[1] + y * pitch,
			cur, width, y ? -pitch : pitch,
			y + 1 < (unsigned)src->height / 2 ? pitch : -pitch,
			2);
		} else {
		    // copy to 1st
		    memcpy(dst1_base + src->offsets[1] + y * pitch, cur,
			width);
		    // create 2nd
		    FilterLineSpatial(dst2_base + src->offsets[1] + y * pitch,
			cur, width, y ? -pitch : pitch,
			y + 1 < (unsigned)src->height / 2 ? pitch : -pitch,
			2);
		}
	    }
	}
    } else {				// YV12 or I420
	for (p = 0; p < src->num_planes; ++p) {
	    pitch = src->pitches[p];
	    // chroma planes are half width
	    width = src->width >> (p != 0);
	    if (VideoSkipChromaDeinterlace[decoder->Resolution] && p) {
		for (y = 0; y < (unsigned)(src->height >> 1); y++) {
		    const uint8_t *cur;

		    cur = tmp + src->offsets[p] + y * pitch;
		    // copy to 1st
		    memcpy(dst1_base + src->offsets[p] + y * pitch, cur,
			width);
		    // copy to 2nd
		    memcpy(dst2_base + src->offsets[p] + y * pitch, cur,
			width);
		}
	    } else {
		for (y = 0; y < (unsigned)(src->height >> (p != 0)); y++) {
		    const uint8_t *cur;

		    cur = tmp + src->offsets[p] + y * pitch;
		    if (y & 1) {
			// copy to 2nd
			memcpy(dst2_base + src->offsets[p] + y * pitch, cur,
			    width);
			// create 1st
			FilterLineSpatial(dst1_base + src->offsets[p] +
			    y * pitch, cur, width, y ? -pitch : pitch,
			    y + 1 < (unsigned)(src->height >> (p != 0)) ?
			    pitch : -pitch, 1);
		    } else {
			// copy to 1st
			memcpy(dst1_base + src->offsets[p] + y * pitch, cur,
			    width);
			// create 2nd
			FilterLineSpatial(dst2_base + src->offsets[p] +
			    y * pitch, cur, width, y ? -pitch : pitch,
			    y + 1 < (unsigned)(src->height >> (p != 0)) ?
			    pitch : -pitch, 1);
		    }
		}
	    }
	}
    }
    free(tmp);

#ifdef DEBUG
    tick5 = GetMsTicks();
#endif
    if (vaUnmapBuffer(decoder->VaDisplay, dst2->buf) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't unmap image buffer\n"));
    }
#ifdef DEBUG
    tick6 = GetMsTicks();
#endif
    if (vaUnmapBuffer(decoder->VaDisplay, dst1->buf) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't unmap image buffer\n"));
    }
#ifdef DEBUG
    tick7 = GetMsTicks();
#endif
    if (vaUnmapBuffer(decoder->VaDisplay, src->buf) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't unmap image buffer\n"));
    }
#ifdef DEBUG
    tick8 = GetMsTicks();

    Debug(3, "video/vaapi: map=%2d/%2d/%2d deint=%2d umap=%2d/%2d/%2d\n",
	tick2 - tick1, tick3 - tick2, tick4 - tick3, tick5 - tick4,
	tick6 - tick5, tick7 - tick6, tick8 - tick7);
#endif
}

///
///	Vaapi bob deinterlace.
///
///	@note FIXME: use common software deinterlace functions.
/// static void VaapiBob(VaapiDecoder * decoder, VAImage * src, VAImage * dst1, VAImage * dst2) { #ifdef DEBUG uint32_t tick1; uint32_t tick2; uint32_t tick3; uint32_t tick4; uint32_t tick5; uint32_t tick6; uint32_t tick7; uint32_t tick8; #endif void *src_base; void *dst1_base; void *dst2_base; unsigned y; unsigned p; #ifdef DEBUG tick1 = GetMsTicks(); #endif if (vaMapBuffer(decoder->VaDisplay, src->buf, &src_base) != VA_STATUS_SUCCESS) { Fatal("video/vaapi: can't map the image!\n"); } #ifdef DEBUG tick2 = GetMsTicks(); #endif if (vaMapBuffer(decoder->VaDisplay, dst1->buf, &dst1_base) != VA_STATUS_SUCCESS) { Fatal("video/vaapi: can't map the image!\n"); } #ifdef DEBUG tick3 = GetMsTicks(); #endif if (vaMapBuffer(decoder->VaDisplay, dst2->buf, &dst2_base) != VA_STATUS_SUCCESS) { Fatal("video/vaapi: can't map the image!\n"); } #ifdef DEBUG tick4 = GetMsTicks(); #endif if (0) { // test all updated memset(dst1_base, 0x00, dst1->data_size); memset(dst2_base, 0xFF, dst2->data_size); return; } #if 0 // interleave for (p = 0; p < src->num_planes; ++p) { for (y = 0; y < (unsigned)(src->height >> (p != 0)); y += 2) { memcpy(dst1_base + src->offsets[p] + (y + 0) * src->pitches[p], src_base + src->offsets[p] + (y + 0) * src->pitches[p], src->pitches[p]); memcpy(dst1_base + src->offsets[p] + (y + 1) * src->pitches[p], src_base + src->offsets[p] + (y + 0) * src->pitches[p], src->pitches[p]); memcpy(dst2_base + src->offsets[p] + (y + 0) * src->pitches[p], src_base + src->offsets[p] + (y + 1) * src->pitches[p], src->pitches[p]); memcpy(dst2_base + src->offsets[p] + (y + 1) * src->pitches[p], src_base + src->offsets[p] + (y + 1) * src->pitches[p], src->pitches[p]); } } #endif #if 1 // use tmp copy if (1) { uint8_t *tmp; tmp = malloc(src->data_size); memcpy(tmp, src_base, src->data_size); for (p = 0; p < src->num_planes; ++p) { for (y = 0; y < (unsigned)(src->height >> (p != 0)); y += 2) { memcpy(dst1_base + src->offsets[p] + (y + 0) * src->pitches[p], tmp + src->offsets[p] + (y + 
0) * src->pitches[p], src->pitches[p]); memcpy(dst1_base + src->offsets[p] + (y + 1) * src->pitches[p], tmp + src->offsets[p] + (y + 0) * src->pitches[p], src->pitches[p]); memcpy(dst2_base + src->offsets[p] + (y + 0) * src->pitches[p], tmp + src->offsets[p] + (y + 1) * src->pitches[p], src->pitches[p]); memcpy(dst2_base + src->offsets[p] + (y + 1) * src->pitches[p], tmp + src->offsets[p] + (y + 1) * src->pitches[p], src->pitches[p]); } } free(tmp); } #endif #if 0 // use multiple tmp copy if (1) { uint8_t *tmp_src; uint8_t *tmp_dst1; uint8_t *tmp_dst2; tmp_src = malloc(src->data_size); memcpy(tmp_src, src_base, src->data_size); tmp_dst1 = malloc(src->data_size); tmp_dst2 = malloc(src->data_size); for (p = 0; p < src->num_planes; ++p) { for (y = 0; y < (unsigned)(src->height >> (p != 0)); y += 2) { memcpy(tmp_dst1 + src->offsets[p] + (y + 0) * src->pitches[p], tmp_src + src->offsets[p] + (y + 0) * src->pitches[p], src->pitches[p]); memcpy(tmp_dst1 + src->offsets[p] + (y + 1) * src->pitches[p], tmp_src + src->offsets[p] + (y + 0) * src->pitches[p], src->pitches[p]); memcpy(tmp_dst2 + src->offsets[p] + (y + 0) * src->pitches[p], tmp_src + src->offsets[p] + (y + 1) * src->pitches[p], src->pitches[p]); memcpy(tmp_dst2 + src->offsets[p] + (y + 1) * src->pitches[p], tmp_src + src->offsets[p] + (y + 1) * src->pitches[p], src->pitches[p]); } } memcpy(dst1_base, tmp_dst1, src->data_size); memcpy(dst2_base, tmp_dst2, src->data_size); free(tmp_src); free(tmp_dst1); free(tmp_dst2); } #endif #if 0 // dst1 first for (p = 0; p < src->num_planes; ++p) { for (y = 0; y < (unsigned)(src->height >> (p != 0)); y += 2) { memcpy(dst1_base + src->offsets[p] + (y + 0) * src->pitches[p], src_base + src->offsets[p] + (y + 0) * src->pitches[p], src->pitches[p]); memcpy(dst1_base + src->offsets[p] + (y + 1) * src->pitches[p], src_base + src->offsets[p] + (y + 0) * src->pitches[p], src->pitches[p]); } } // dst2 next for (p = 0; p < src->num_planes; ++p) { for (y = 0; y < (unsigned)(src->height 
>> (p != 0)); y += 2) { memcpy(dst2_base + src->offsets[p] + (y + 0) * src->pitches[p], src_base + src->offsets[p] + (y + 1) * src->pitches[p], src->pitches[p]); memcpy(dst2_base + src->offsets[p] + (y + 1) * src->pitches[p], src_base + src->offsets[p] + (y + 1) * src->pitches[p], src->pitches[p]); } } #endif #ifdef DEBUG tick5 = GetMsTicks(); #endif if (vaUnmapBuffer(decoder->VaDisplay, dst2->buf) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't unmap image buffer\n")); } #ifdef DEBUG tick6 = GetMsTicks(); #endif if (vaUnmapBuffer(decoder->VaDisplay, dst1->buf) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't unmap image buffer\n")); } #ifdef DEBUG tick7 = GetMsTicks(); #endif if (vaUnmapBuffer(decoder->VaDisplay, src->buf) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't unmap image buffer\n")); } #ifdef DEBUG tick8 = GetMsTicks(); Debug(4, "video/vaapi: map=%2d/%2d/%2d deint=%2d umap=%2d/%2d/%2d\n", tick2 - tick1, tick3 - tick2, tick4 - tick3, tick5 - tick4, tick6 - tick5, tick7 - tick6, tick8 - tick7); #endif } /// /// Create software deinterlace images. /// /// @param decoder VA-API decoder /// static void VaapiCreateDeinterlaceImages(VaapiDecoder * decoder) { VAImageFormat format[1]; int i; // NV12, YV12, I420, BGRA // NV12 Y U/V 2x2 // YV12 Y V U 2x2 // I420 Y U V 2x2 // Intel needs NV12 VaapiFindImageFormat(decoder, PIX_FMT_NV12, format); //VaapiFindImageFormat(decoder, PIX_FMT_YUV420P, format); for (i = 0; i < 5; ++i) { if (vaCreateImage(decoder->VaDisplay, format, decoder->InputWidth, decoder->InputHeight, decoder->DeintImages + i) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't create image!\n")); } } #ifdef DEBUG if (1) { VAImage *img; img = decoder->DeintImages; Debug(3, "video/vaapi: %c%c%c%c %dx%d*%d\n", img->format.fourcc, img->format.fourcc >> 8, img->format.fourcc >> 16, img->format.fourcc >> 24, img->width, img->height, img->num_planes); } #endif } /// /// Destroy software deinterlace images. 
///
/// @param decoder	VA-API decoder
///
static void VaapiDestroyDeinterlaceImages(VaapiDecoder * decoder)
{
    int i;

    // 5 images are created by VaapiCreateDeinterlaceImages
    for (i = 0; i < 5; ++i) {
	if (vaDestroyImage(decoder->VaDisplay,
		decoder->DeintImages[i].image_id) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't destroy image!\n"));
	}
	// mark slot free, so the pool can be lazily recreated
	decoder->DeintImages[i].image_id = VA_INVALID_ID;
    }
}

///
/// Vaapi software deinterlace.
///
/// Variant using vaDeriveImage: source and destination surfaces are
/// mapped directly, without vaGetImage/vaPutImage copies.
///
/// @param decoder	VA-API decoder
/// @param surface	interlaced hardware surface
///
static void VaapiCpuDerive(VaapiDecoder * decoder, VASurfaceID surface)
{
    //
    //	vaPutImage not working, vaDeriveImage
    //
#ifdef DEBUG
    uint32_t tick1;
    uint32_t tick2;
    uint32_t tick3;
    uint32_t tick4;
    uint32_t tick5;
#endif
    VAImage image[1];
    VAImage dest1[1];
    VAImage dest2[1];
    VAStatus status;
    VASurfaceID out1;
    VASurfaceID out2;

#ifdef DEBUG
    tick1 = GetMsTicks();
#endif
#if 0
    // get image test
    if (decoder->Image->image_id == VA_INVALID_ID) {
	VAImageFormat format[1];

	VaapiFindImageFormat(decoder, PIX_FMT_NV12, format);
	if (vaCreateImage(VaDisplay, format, decoder->InputWidth,
		decoder->InputHeight, decoder->Image) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't create image!\n"));
	}
    }
    if (vaGetImage(decoder->VaDisplay, surface, 0, 0, decoder->InputWidth,
	    decoder->InputHeight,
	    decoder->Image->image_id) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't get source image\n"));
	VaapiQueueSurface(decoder, surface, 0);
	VaapiQueueSurface(decoder, surface, 0);
	return;
    }
    *image = *decoder->Image;
#else
    // on failure queue the interlaced surface twice (once per field)
    // and show it as-is instead of dropping it
    if ((status =
	    vaDeriveImage(decoder->VaDisplay, surface,
		image)) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: vaDeriveImage failed %d\n"), status);
	VaapiQueueSurface(decoder, surface, 0);
	VaapiQueueSurface(decoder, surface, 0);
	return;
    }
#endif
#ifdef DEBUG
    tick2 = GetMsTicks();
#endif
    Debug(4, "video/vaapi: %c%c%c%c %dx%d*%d\n", image->format.fourcc,
	image->format.fourcc >> 8, image->format.fourcc >> 16,
	image->format.fourcc >> 24, image->width, image->height,
	image->num_planes);

    // get a free surfaces
    out1 = VaapiGetSurface0(decoder);
    if (out1 == VA_INVALID_ID) {
	abort();
    }
    if ((status =
	    vaDeriveImage(decoder->VaDisplay, out1,
		dest1)) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: vaDeriveImage failed %d\n"), status);
    }
#ifdef DEBUG
    tick3 = GetMsTicks();
#endif
    out2 = VaapiGetSurface0(decoder);
    if (out2 == VA_INVALID_ID) {
	abort();
    }
    if ((status =
	    vaDeriveImage(decoder->VaDisplay, out2,
		dest2)) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: vaDeriveImage failed %d\n"), status);
    }
#ifdef DEBUG
    tick4 = GetMsTicks();
#endif
    // split the interlaced frame into two field frames
    switch (VideoDeinterlace[decoder->Resolution]) {
	case VideoDeinterlaceSoftBob:
	default:
	    VaapiBob(decoder, image, dest1, dest2);
	    break;
	case VideoDeinterlaceSoftSpatial:
	    VaapiSpatial(decoder, image, dest1, dest2);
	    break;
    }
#ifdef DEBUG
    tick5 = GetMsTicks();
#endif
#if 1
    if (vaDestroyImage(VaDisplay, image->image_id) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't destroy image!\n"));
    }
#endif
    if (vaDestroyImage(VaDisplay, dest1->image_id) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't destroy image!\n"));
    }
    if (vaDestroyImage(VaDisplay, dest2->image_id) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't destroy image!\n"));
    }
    VaapiQueueSurface(decoder, out1, 1);
    VaapiQueueSurface(decoder, out2, 1);

#ifdef DEBUG
    // NOTE(review): tick5 is overwritten here, so the deinterlace time
    // measured above is lost in this timing output
    tick5 = GetMsTicks();
    Debug(4, "video/vaapi: get=%2d get1=%2d get2=%d deint=%2d\n",
	tick2 - tick1, tick3 - tick2, tick4 - tick3, tick5 - tick4);
#endif
}

///
/// Vaapi software deinterlace.
///
/// Variant using vaGetImage/vaPutImage with the DeintImages pool.
///
/// @param decoder	VA-API decoder
/// @param surface	interlaced hardware surface
///
static void VaapiCpuPut(VaapiDecoder * decoder, VASurfaceID surface)
{
    //
    //	vaPutImage working
    //
#ifdef DEBUG
    uint32_t tick1;
    uint32_t tick2;
    uint32_t tick3;
    uint32_t tick4;
    uint32_t tick5;
#endif
    VAImage *img1;
    VAImage *img2;
    VAImage *img3;
    VASurfaceID out;
    VAStatus status;

    //
    //	Create deinterlace images (lazily, on first use).
    //
    if (decoder->DeintImages[0].image_id == VA_INVALID_ID) {
	VaapiCreateDeinterlaceImages(decoder);
    }

    if (0 && vaSyncSurface(decoder->VaDisplay, surface) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: vaSyncSurface failed\n"));
    }

    // img1 = interlaced source, img2/img3 = the two field frames
    img1 = decoder->DeintImages;
    img2 = decoder->DeintImages + 1;
    img3 = decoder->DeintImages + 2;
#ifdef DEBUG
    tick1 = GetMsTicks();
#endif
    if (vaGetImage(decoder->VaDisplay, surface, 0, 0, decoder->InputWidth,
	    decoder->InputHeight, img1->image_id) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't get source image\n"));
	VaapiQueueSurface(decoder, surface, 0);
	VaapiQueueSurface(decoder, surface, 0);
	return;
    }
#ifdef DEBUG
    tick2 = GetMsTicks();
#endif

    // FIXME: handle top_field_first
    switch (VideoDeinterlace[decoder->Resolution]) {
	case VideoDeinterlaceSoftBob:
	default:
	    VaapiBob(decoder, img1, img2, img3);
	    break;
	case VideoDeinterlaceSoftSpatial:
	    VaapiSpatial(decoder, img1, img2, img3);
	    break;
    }
#ifdef DEBUG
    tick3 = GetMsTicks();
#endif

    // get a free surface and upload the image
    out = VaapiGetSurface0(decoder);
    if (out == VA_INVALID_ID) {
	abort();
    }
    if ((status =
	    vaPutImage(VaDisplay, out, img2->image_id, 0, 0, img2->width,
		img2->height, 0, 0, img2->width,
		img2->height)) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't put image: %d!\n"), status);
	abort();
    }
    VaapiQueueSurface(decoder, out, 1);
    if (0 && vaSyncSurface(decoder->VaDisplay, out) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: vaSyncSurface failed\n"));
    }
#ifdef DEBUG
    tick4 = GetMsTicks();
    Debug(4, "video/vaapi: deint %d %#010x -> %#010x\n",
	decoder->SurfaceField, surface, out);
#endif

    // get a free surface and upload the image
    out = VaapiGetSurface0(decoder);
    if (out == VA_INVALID_ID) {
	abort();
    }
    if (vaPutImage(VaDisplay, out, img3->image_id, 0, 0, img3->width,
	    img3->height, 0, 0, img3->width,
	    img3->height) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't put image!\n"));
    }
    VaapiQueueSurface(decoder, out, 1);
    if (0 && vaSyncSurface(decoder->VaDisplay, out) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: vaSyncSurface failed\n"));
    }
#ifdef DEBUG
    tick5 = GetMsTicks();
    Debug(4, "video/vaapi: get=%2d deint=%2d put1=%2d put2=%2d\n",
	tick2 - tick1, tick3 - tick2, tick4 - tick3, tick5 - tick4);
#endif
}

///
/// Vaapi software deinterlace.
///
/// Dispatch to the vaPutImage or vaDeriveImage variant, depending on
/// what the driver supports (decoder->GetPutImage).
///
/// @param decoder	VA-API decoder
/// @param surface	interlaced hardware surface
///
static void VaapiCpuDeinterlace(VaapiDecoder * decoder, VASurfaceID surface)
{
    if (decoder->GetPutImage) {
	VaapiCpuPut(decoder, surface);
    } else {
	VaapiCpuDerive(decoder, surface);
    }
    // FIXME: must release software input surface
}

///
/// Render a ffmpeg frame
///
/// @param decoder	VA-API decoder
/// @param video_ctx	ffmpeg video codec context
/// @param frame	frame to display
///
static void VaapiRenderFrame(VaapiDecoder * decoder,
    const AVCodecContext * video_ctx, const AVFrame * frame)
{
    VASurfaceID surface;
    int interlaced;

    // FIXME: some tv-stations toggle interlace on/off
    // frame->interlaced_frame isn't always correct set
    // heuristic: 720 lines is forced progressive, everything else
    // is forced interlaced
    interlaced = frame->interlaced_frame;
    if (video_ctx->height == 720) {
	if (interlaced && !decoder->WrongInterlacedWarned) {
	    Debug(3, "video/vaapi: wrong interlace flag fixed\n");
	    decoder->WrongInterlacedWarned = 1;
	}
	interlaced = 0;
    } else {
	if (!interlaced && !decoder->WrongInterlacedWarned) {
	    Debug(3, "video/vaapi: wrong interlace flag fixed\n");
	    decoder->WrongInterlacedWarned = 1;
	}
	interlaced = 1;
    }

    // FIXME: should be done by init video_ctx->field_order
    if (decoder->Interlaced != interlaced
	|| decoder->TopFieldFirst != frame->top_field_first) {
#if 0
	// field_order only in git
	Debug(3, "video/vaapi: interlaced %d top-field-first %d - %d\n",
	    interlaced, frame->top_field_first, video_ctx->field_order);
#else
	Debug(3, "video/vaapi: interlaced %d top-field-first %d\n",
	    interlaced, frame->top_field_first);
#endif
	decoder->Interlaced = interlaced;
	decoder->TopFieldFirst = frame->top_field_first;
	decoder->SurfaceField = 0;
    }
    // update aspect ratio changes
#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53,60,100)
    if (decoder->InputWidth && decoder->InputHeight
	&& av_cmp_q(decoder->InputAspect, frame->sample_aspect_ratio)) {
	Debug(3, "video/vaapi: aspect ratio changed\n");

	decoder->InputAspect = frame->sample_aspect_ratio;
	VaapiUpdateOutput(decoder);
    }
#else
    if (decoder->InputWidth && decoder->InputHeight
	&& av_cmp_q(decoder->InputAspect, video_ctx->sample_aspect_ratio)) {
	Debug(3, "video/vaapi: aspect ratio changed\n");

	decoder->InputAspect = video_ctx->sample_aspect_ratio;
	VaapiUpdateOutput(decoder);
    }
#endif

    //
    //	Hardware render
    //
    if (video_ctx->hwaccel_context) {
	if (video_ctx->height != decoder->InputHeight
	    || video_ctx->width != decoder->InputWidth) {
	    Error(_("video/vaapi: stream <-> surface size mismatch\n"));
	    return;
	}
	// ffmpeg VA-API hwaccel delivers the surface id in data[3]
	surface = (unsigned)(size_t) frame->data[3];
	Debug(4, "video/vaapi: hw render hw surface %#010x\n", surface);

	if (interlaced
	    && VideoDeinterlace[decoder->Resolution] >=
	    VideoDeinterlaceSoftBob) {
	    VaapiCpuDeinterlace(decoder, surface);
	} else {
	    VaapiQueueSurface(decoder, surface, 0);
	}

	//
	//	VAImage render
	//
    } else {
	void *va_image_data;
	int i;
	AVPicture picture[1];
	int width;
	int height;

	Debug(4, "video/vaapi: hw render sw surface\n");

	width = video_ctx->width;
	height = video_ctx->height;
	//
	//	Check image, format, size
	//
	if ((decoder->GetPutImage
		&& decoder->Image->image_id == VA_INVALID_ID)
	    || decoder->PixFmt != video_ctx->pix_fmt
	    || width != decoder->InputWidth
	    || height != decoder->InputHeight) {

	    Debug(3,
		"video/vaapi: stream <-> surface size/interlace mismatch\n");

	    decoder->PixFmt = video_ctx->pix_fmt;
	    // FIXME: aspect done above!
	    decoder->InputWidth = width;
	    decoder->InputHeight = height;

	    VaapiSetup(decoder, video_ctx);
	}
	// FIXME: Need to insert software deinterlace here
	// FIXME: can/must insert auto-crop here (is done after upload)

	// get a free surface and upload the image
	surface = VaapiGetSurface0(decoder);
	Debug(4, "video/vaapi: video surface %#010x displayed\n", surface);

	// fall back to get/put if deriving the surface fails
	if (!decoder->GetPutImage
	    && vaDeriveImage(decoder->VaDisplay, surface,
		decoder->Image) != VA_STATUS_SUCCESS) {
	    VAImageFormat format[1];

	    Error(_("video/vaapi: vaDeriveImage failed\n"));

	    decoder->GetPutImage = 1;
	    VaapiFindImageFormat(decoder, decoder->PixFmt, format);
	    if (vaCreateImage(VaDisplay, format, width, height,
		    decoder->Image) != VA_STATUS_SUCCESS) {
		Error(_("video/vaapi: can't create image!\n"));
	    }
	}
	//
	//	Copy data from frame to image
	//
	if (vaMapBuffer(VaDisplay, decoder->Image->buf,
		&va_image_data) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't map the image!\n"));
	}
	// crazy: intel mixes YV12 and NV12 with mpeg
	if (decoder->Image->format.fourcc == VA_FOURCC_NV12) {
	    int x;

	    // intel NV12 convert YV12 to NV12

	    // copy Y
	    for (i = 0; i < height; ++i) {
		memcpy(va_image_data + decoder->Image->offsets[0]
		    + decoder->Image->pitches[0] * i,
		    frame->data[0] + frame->linesize[0] * i,
		    frame->linesize[0]);
	    }
	    // copy UV (interleave the planar U/V source planes)
	    for (i = 0; i < height / 2; ++i) {
		for (x = 0; x < width / 2; ++x) {
		    ((uint8_t *) va_image_data)[decoder->Image->offsets[1]
			+ decoder->Image->pitches[1] * i + x * 2 + 0] =
			frame->data[1][i * frame->linesize[1] + x];
		    ((uint8_t *) va_image_data)[decoder->Image->offsets[1]
			+ decoder->Image->pitches[1] * i + x * 2 + 1] =
			frame->data[2][i * frame->linesize[2] + x];
		}
	    }
	    // vdpau uses this
	} else if (decoder->Image->format.fourcc ==
	    VA_FOURCC('I', '4', '2', '0')) {
	    picture->data[0] = va_image_data + decoder->Image->offsets[0];
	    picture->linesize[0] = decoder->Image->pitches[0];
	    picture->data[1] = va_image_data + decoder->Image->offsets[1];
	    // NOTE(review): linesize[1]/[2] use pitches[2]/pitches[1]
	    // (crossed) while data uses offsets[1]/[2] in order — looks
	    // like a driver quirk workaround, but verify it is not a bug
	    picture->linesize[1] = decoder->Image->pitches[2];
	    picture->data[2] = va_image_data + decoder->Image->offsets[2];
	    picture->linesize[2] = decoder->Image->pitches[1];
	    av_picture_copy(picture, (AVPicture *) frame,
		video_ctx->pix_fmt, width, height);
	} else if (decoder->Image->num_planes == 3) {
	    // generic 3 plane case: swap planes 1/2 (YV12 <-> I420 order)
	    picture->data[0] = va_image_data + decoder->Image->offsets[0];
	    picture->linesize[0] = decoder->Image->pitches[0];
	    picture->data[1] = va_image_data + decoder->Image->offsets[2];
	    picture->linesize[1] = decoder->Image->pitches[2];
	    picture->data[2] = va_image_data + decoder->Image->offsets[1];
	    picture->linesize[2] = decoder->Image->pitches[1];
	    av_picture_copy(picture, (AVPicture *) frame,
		video_ctx->pix_fmt, width, height);
	}
	if (vaUnmapBuffer(VaDisplay, decoder->Image->buf) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't unmap the image!\n"));
	}

	Debug(4, "video/vaapi: buffer %dx%d <- %dx%d\n",
	    decoder->Image->width, decoder->Image->height, width, height);

	if (decoder->GetPutImage
	    && (i =
		vaPutImage(VaDisplay, surface, decoder->Image->image_id, 0, 0,
		    width, height, 0, 0, width,
		    height)) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't put image err:%d!\n"), i);
	}

	if (!decoder->GetPutImage) {
	    if (vaDestroyImage(VaDisplay,
		    decoder->Image->image_id) != VA_STATUS_SUCCESS) {
		Error(_("video/vaapi: can't destroy image!\n"));
	    }
	    decoder->Image->image_id = VA_INVALID_ID;
	}

	VaapiQueueSurface(decoder, surface, 1);
    }

    // NOTE(review): FrameCounter is only advanced here for interlaced
    // content — confirm progressive frames are counted elsewhere
    if (decoder->Interlaced) {
	++decoder->FrameCounter;
    }
}

///
/// Get hwaccel context for ffmpeg.
///
/// @param decoder	VA-API hw decoder
///
static void *VaapiGetHwAccelContext(VaapiDecoder * decoder)
{
    return decoder->VaapiContext;
}

///
/// Advance displayed frame of decoder.
/// /// @param decoder VA-API hw decoder /// static void VaapiAdvanceDecoderFrame(VaapiDecoder * decoder) { // next surface, if complete frame is displayed (1 -> 0) if (decoder->SurfaceField) { VASurfaceID surface; int filled; filled = atomic_read(&decoder->SurfacesFilled); // FIXME: this should check the caller // check decoder, if new surface is available if (filled <= 1) { // keep use of last surface ++decoder->FramesDuped; // FIXME: don't warn after stream start, don't warn during pause Error(_("video: display buffer empty, duping frame (%d/%d) %d\n"), decoder->FramesDuped, decoder->FrameCounter, VideoGetBuffers(decoder->Stream)); return; } // wait for rendering finished surface = decoder->SurfacesRb[decoder->SurfaceRead]; if (vaSyncSurface(decoder->VaDisplay, surface) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: vaSyncSurface failed\n")); } decoder->SurfaceRead = (decoder->SurfaceRead + 1) % VIDEO_SURFACES_MAX; atomic_dec(&decoder->SurfacesFilled); // progressiv oder software deinterlacer decoder->SurfaceField = !decoder->Interlaced || VideoDeinterlace[decoder->Resolution] >= VideoDeinterlaceSoftBob; return; } decoder->SurfaceField = 1; } /// /// Display a video frame. 
///
/// @todo FIXME: add detection of missed frames
///
static void VaapiDisplayFrame(void)
{
    struct timespec nowtime;
#ifdef DEBUG
    uint32_t start;
    uint32_t put1;
    uint32_t put2;
#endif
    int i;
    VaapiDecoder *decoder;

    if (VideoSurfaceModesChanged) {	// handle changed modes
	VideoSurfaceModesChanged = 0;
	for (i = 0; i < VaapiDecoderN; ++i) {
	    VaapiInitSurfaceFlags(VaapiDecoders[i]);
	}
    }
    // look if any stream have a new surface available
    for (i = 0; i < VaapiDecoderN; ++i) {
	VASurfaceID surface;
	int filled;

	decoder = VaapiDecoders[i];
	decoder->FramesDisplayed++;
	decoder->StartCounter++;

#ifdef VA_EXP
	// wait for display finished
	if (decoder->LastSurface != VA_INVALID_ID) {
	    if (vaSyncSurface(decoder->VaDisplay,
		    decoder->LastSurface) != VA_STATUS_SUCCESS) {
		Error(_("video/vaapi: vaSyncSurface failed\n"));
	    }
	}
#endif
	filled = atomic_read(&decoder->SurfacesFilled);
	// no surface available: show black with possible osd
	if (!filled) {
	    VaapiBlackSurface(decoder);
#ifdef VA_EXP
	    decoder->LastSurface = decoder->BlackSurface;
#endif
	    VaapiMessage(3, "video/vaapi: black surface displayed\n");
#ifdef USE_SCREENSAVER
	    // black screen: re-enable DPMS, let the screensaver run
	    if (EnableDPMSatBlackScreen && DPMSDisabled) {
		Debug(3, "Black surface, DPMS enabled");
		X11DPMSReenable(Connection);
		X11SuspendScreenSaver(Connection, 1);
	    }
#endif
	    continue;
	    // NOTE: the else-if below pairs with the if(!filled) above;
	    // the braces interleave with the preprocessor blocks
#ifdef USE_SCREENSAVER
	} else if (!DPMSDisabled) {	// always disable
	    Debug(3, "DPMS disabled");
	    X11DPMSDisable(Connection);
	    X11SuspendScreenSaver(Connection, 0);
#endif
	}

	surface = decoder->SurfacesRb[decoder->SurfaceRead];
#ifdef VA_EXP
	decoder->LastSurface = surface;
#endif
#ifdef DEBUG
	if (surface == VA_INVALID_ID) {
	    printf(_("video/vaapi: invalid surface in ringbuffer\n"));
	}
	Debug(4, "video/vaapi: yy video surface %#010x displayed\n", surface);
	start = GetMsTicks();
#endif

	// VDPAU driver + INTEL driver does no v-sync with 1080
	// (the if(0) keeps this double-field workaround disabled)
	if (0 && decoder->Interlaced
	    // FIXME: buggy libva-driver-vdpau, buggy libva-driver-intel
	    && (VaapiBuggyVdpau || (0 && VaapiBuggyIntel
		    && decoder->InputHeight == 1080))
	    && VideoDeinterlace[decoder->Resolution] !=
	    VideoDeinterlaceWeave) {
	    VaapiPutSurfaceX11(decoder, surface, decoder->Interlaced,
		decoder->TopFieldFirst, 0);
#ifdef DEBUG
	    put1 = GetMsTicks();
#endif
	    VaapiPutSurfaceX11(decoder, surface, decoder->Interlaced,
		decoder->TopFieldFirst, 1);
#ifdef DEBUG
	    put2 = GetMsTicks();
#endif
	} else {
#ifdef USE_GLX
	    if (GlxEnabled) {
		VaapiPutSurfaceGLX(decoder, surface, decoder->Interlaced,
		    decoder->TopFieldFirst, decoder->SurfaceField);
	    } else
#endif
	    {
		VaapiPutSurfaceX11(decoder, surface, decoder->Interlaced,
		    decoder->TopFieldFirst, decoder->SurfaceField);
	    }
#ifdef DEBUG
	    put1 = GetMsTicks();
	    put2 = put1;
#endif
	}
	clock_gettime(CLOCK_MONOTONIC, &nowtime);
	// FIXME: 31 only correct for 50Hz
	if ((nowtime.tv_sec - decoder->FrameTime.tv_sec) * 1000 * 1000 * 1000
	    + (nowtime.tv_nsec - decoder->FrameTime.tv_nsec) >
	    31 * 1000 * 1000) {
	    // FIXME: ignore still-frame, trick-speed
	    Debug(3, "video/vaapi: time/frame too long %ldms\n",
		((nowtime.tv_sec - decoder->FrameTime.tv_sec) * 1000 * 1000 *
		    1000 + (nowtime.tv_nsec -
			decoder->FrameTime.tv_nsec)) / (1000 * 1000));
	    Debug(4, "video/vaapi: put1 %2u put2 %2u\n", put1 - start,
		put2 - put1);
	}
#ifdef noDEBUG
	Debug(3, "video/vaapi: time/frame %ldms\n",
	    ((nowtime.tv_sec - decoder->FrameTime.tv_sec) * 1000 * 1000 *
		1000 + (nowtime.tv_nsec -
		    decoder->FrameTime.tv_nsec)) / (1000 * 1000));
	if (put2 > start + 20) {
	    Debug(3, "video/vaapi: putsurface too long %ums\n", put2 - start);
	}
	Debug(4, "video/vaapi: put1 %2u put2 %2u\n", put1 - start,
	    put2 - put1);
#endif
	decoder->FrameTime = nowtime;
    }

#ifdef USE_GLX
    if (GlxEnabled) {
	//
	//	add OSD
	//
	if (OsdShown) {
	    GlxRenderTexture(OsdGlTextures[OsdIndex], 0, 0, VideoWindowWidth,
		VideoWindowHeight);
	    // FIXME: toggle osd
	}
	//glFinish();
	glXSwapBuffers(XlibDisplay, VideoWindow);
	GlxCheck();
	//glClearColor(1.0f, 0.0f, 0.0f, 0.0f);
	glClear(GL_COLOR_BUFFER_BIT);
    }
#endif
}

///
/// Set VA-API decoder video clock.
/// /// @param decoder VA-API hardware decoder /// @param pts audio presentation timestamp /// void VaapiSetClock(VaapiDecoder * decoder, int64_t pts) { decoder->PTS = pts; } /// /// Get VA-API decoder video clock. /// /// @param decoder VA-API decoder /// static int64_t VaapiGetClock(const VaapiDecoder * decoder) { // pts is the timestamp of the latest decoded frame if (decoder->PTS == (int64_t) AV_NOPTS_VALUE) { return AV_NOPTS_VALUE; } // subtract buffered decoded frames if (decoder->Interlaced) { return decoder->PTS - 20 * 90 * (2 * atomic_read(&decoder->SurfacesFilled) - decoder->SurfaceField); } return decoder->PTS - 20 * 90 * (atomic_read(&decoder->SurfacesFilled) + 2); } /// /// Set VA-API decoder closing stream flag. /// /// @param decoder VA-API decoder /// static void VaapiSetClosing(VaapiDecoder * decoder) { decoder->Closing = 1; } /// /// Reset start of frame counter. /// /// @param decoder VA-API decoder /// static void VaapiResetStart(VaapiDecoder * decoder) { decoder->StartCounter = 0; } /// /// Set trick play speed. /// /// @param decoder VA-API decoder /// @param speed trick speed (0 = normal) /// static void VaapiSetTrickSpeed(VaapiDecoder * decoder, int speed) { decoder->TrickSpeed = speed; decoder->TrickCounter = speed; if (speed) { decoder->Closing = 0; } } /// /// Get VA-API decoder statistics. /// /// @param decoder VA-API decoder /// @param[out] missed missed frames /// @param[out] duped duped frames /// @param[out] dropped dropped frames /// @param[out] count number of decoded frames /// void VaapiGetStats(VaapiDecoder * decoder, int *missed, int *duped, int *dropped, int *counter) { *missed = decoder->FramesMissed; *duped = decoder->FramesDuped; *dropped = decoder->FramesDropped; *counter = decoder->FrameCounter; } /// /// Sync decoder output to audio. 
/// /// trick-speed show frame times /// still-picture show frame until new frame arrives /// 60hz-mode repeat every 5th picture /// video>audio slow down video by duplicating frames /// video