vdr-plugin-softhddevice/0000755000175000017500000000000012504327116015137 5ustar tobiastobiasvdr-plugin-softhddevice/softhddevice.cpp0000644000175000017500000027165512504327116020332 0ustar tobiastobias/// /// @file softhddevice.cpp @brief A software HD device plugin for VDR. /// /// Copyright (c) 2011 - 2013 by Johns. All Rights Reserved. /// /// Contributor(s): /// /// License: AGPLv3 /// /// This program is free software: you can redistribute it and/or modify /// it under the terms of the GNU Affero General Public License as /// published by the Free Software Foundation, either version 3 of the /// License. /// /// This program is distributed in the hope that it will be useful, /// but WITHOUT ANY WARRANTY; without even the implied warranty of /// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the /// GNU Affero General Public License for more details. /// /// $Id: e933955f8ab84488a33fd839bbbb80b467b75fbe $ ////////////////////////////////////////////////////////////////////////////// #define __STDC_CONSTANT_MACROS ///< needed for ffmpeg UINT64_C #include #include #include #include #include #include #ifdef HAVE_CONFIG #include "config.h" #endif #include "softhddev.h" #include "softhddevice.h" #include "softhddevice_service.h" extern "C" { #include #include #include "audio.h" #include "video.h" #include "codec.h" } ////////////////////////////////////////////////////////////////////////////// /// vdr-plugin version number. /// Makefile extracts the version number for generating the file name /// for the distribution archive. static const char *const VERSION = "0.6.1rc1" #ifdef GIT_REV "-GIT" GIT_REV #endif ; /// vdr-plugin description. static const char *const DESCRIPTION = trNOOP("A software and GPU emulated HD device"); /// vdr-plugin text of main menu entry static const char *MAINMENUENTRY = trNOOP("SoftHdDevice"); /// single instance of softhddevice plugin device. 
static class cSoftHdDevice *MyDevice; ////////////////////////////////////////////////////////////////////////////// #define RESOLUTIONS 4 ///< number of resolutions /// resolutions names static const char *const Resolution[RESOLUTIONS] = { "576i", "720p", "1080i_fake", "1080i" }; static char ConfigMakePrimary; ///< config primary wanted static char ConfigHideMainMenuEntry; ///< config hide main menu entry static char ConfigDetachFromMainMenu; ///< detach from main menu entry instead of suspend static char ConfigSuspendClose; ///< suspend should close devices static char ConfigSuspendX11; ///< suspend should stop x11 static char Config4to3DisplayFormat = 1; ///< config 4:3 display format static char ConfigOtherDisplayFormat = 1; ///< config other display format static uint32_t ConfigVideoBackground; ///< config video background color static int ConfigOsdWidth; ///< config OSD width static int ConfigOsdHeight; ///< config OSD height static char ConfigVideoStudioLevels; ///< config use studio levels static char ConfigVideo60HzMode; ///< config use 60Hz display mode static char ConfigVideoSoftStartSync; ///< config use softstart sync static char ConfigVideoBlackPicture; ///< config enable black picture mode char ConfigVideoClearOnSwitch; ///< config enable Clear on channel switch static int ConfigVideoBrightness; ///< config video brightness static int ConfigVideoContrast = 1000; ///< config video contrast static int ConfigVideoSaturation = 1000; ///< config video saturation static int ConfigVideoHue; ///< config video hue /// config deinterlace static int ConfigVideoDeinterlace[RESOLUTIONS]; /// config skip chroma static int ConfigVideoSkipChromaDeinterlace[RESOLUTIONS]; /// config inverse telecine static int ConfigVideoInverseTelecine[RESOLUTIONS]; /// config denoise static int ConfigVideoDenoise[RESOLUTIONS]; /// config sharpen static int ConfigVideoSharpen[RESOLUTIONS]; /// config scaling static int ConfigVideoScaling[RESOLUTIONS]; /// config cut top and bottom 
pixels static int ConfigVideoCutTopBottom[RESOLUTIONS]; /// config cut left and right pixels static int ConfigVideoCutLeftRight[RESOLUTIONS]; static int ConfigAutoCropEnabled; ///< auto crop detection enabled static int ConfigAutoCropInterval; ///< auto crop detection interval static int ConfigAutoCropDelay; ///< auto crop detection delay static int ConfigAutoCropTolerance; ///< auto crop detection tolerance static int ConfigVideoAudioDelay; ///< config audio delay static char ConfigAudioDrift; ///< config audio drift static char ConfigAudioPassthrough; ///< config audio pass-through mask static char AudioPassthroughState; ///< flag audio pass-through on/off static char ConfigAudioDownmix; ///< config ffmpeg audio downmix static char ConfigAudioSoftvol; ///< config use software volume static char ConfigAudioNormalize; ///< config use normalize volume static int ConfigAudioMaxNormalize; ///< config max normalize factor static char ConfigAudioCompression; ///< config use volume compression static int ConfigAudioMaxCompression; ///< config max volume compression static int ConfigAudioStereoDescent; ///< config reduce stereo loudness int ConfigAudioBufferTime; ///< config size ms of audio buffer static int ConfigAudioAutoAES; ///< config automatic AES handling static char *ConfigX11Display; ///< config x11 display static char *ConfigAudioDevice; ///< config audio stereo device static char *ConfigPassthroughDevice; ///< config audio pass-through device #ifdef USE_PIP static int ConfigPipX = 100 - 3 - 18; ///< config pip pip x in % static int ConfigPipY = 100 - 4 - 18; ///< config pip pip y in % static int ConfigPipWidth = 18; ///< config pip pip width in % static int ConfigPipHeight = 18; ///< config pip pip height in % static int ConfigPipVideoX; ///< config pip video x in % static int ConfigPipVideoY; ///< config pip video y in % static int ConfigPipVideoWidth; ///< config pip video width in % static int ConfigPipVideoHeight; ///< config pip video height in % static 
int ConfigPipAltX; ///< config pip alt. pip x in % static int ConfigPipAltY = 50; ///< config pip alt. pip y in % static int ConfigPipAltWidth; ///< config pip alt. pip width in % static int ConfigPipAltHeight = 50; ///< config pip alt. pip height in % static int ConfigPipAltVideoX; ///< config pip alt. video x in % static int ConfigPipAltVideoY; ///< config pip alt. video y in % static int ConfigPipAltVideoWidth; ///< config pip alt. video width in % static int ConfigPipAltVideoHeight = 50; ///< config pip alt. video height in % #endif static volatile int DoMakePrimary; ///< switch primary device to this #define SUSPEND_EXTERNAL -1 ///< play external suspend mode #define NOT_SUSPENDED 0 ///< not suspend mode #define SUSPEND_NORMAL 1 ///< normal suspend mode #define SUSPEND_DETACHED 2 ///< detached suspend mode static signed char SuspendMode; ///< suspend mode ////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////// // C Callbacks ////////////////////////////////////////////////////////////////////////////// /** ** Soft device plugin remote class. */ class cSoftRemote:public cRemote { public: /** ** Soft device remote class constructor. ** ** @param name remote name */ cSoftRemote(const char *name):cRemote(name) { } /** ** Put keycode into vdr event queue. ** ** @param code key code ** @param repeat flag key repeated ** @param release flag key released */ bool Put(const char *code, bool repeat = false, bool release = false) { return cRemote::Put(code, repeat, release); } }; /** ** Feed key press as remote input (called from C part). 
**
**	@param keymap	target keymap "XKeymap" name
**	@param key	pressed/released key name
**	@param repeat	repeated key flag
**	@param release	released key flag
**	@param letter	x11 character string (system setting locale)
*/
extern "C" void FeedKeyPress(const char *keymap, const char *key, int repeat,
    int release, const char *letter)
{
    cRemote *remote;
    cSoftRemote *csoft;

    if (!keymap || !key) {		// nothing to feed
	return;
    }
    // find an already registered remote with the requested keymap name
    for (remote = Remotes.First(); remote; remote = Remotes.Next(remote)) {
	if (!strcmp(remote->Name(), keymap)) {
	    break;
	}
    }
    // if the remote doesn't exist yet, create it
    if (remote) {
	csoft = (cSoftRemote *) remote;
    } else {
	dsyslog("[softhddev]%s: remote '%s' not found\n", __FUNCTION__,
	    keymap);
	csoft = new cSoftRemote(keymap);
    }

    //dsyslog("[softhddev]%s %s, %s, %s\n", __FUNCTION__, keymap, key, letter);
    if (key[1]) {			// no single character
	// unknown multi-char key: fall back to feeding the x11 letter
	// as a keyboard key for VDR's edit mode (unless learning keys)
	if (!csoft->Put(key, repeat, release) && letter
	    && !cRemote::IsLearning()) {
	    cCharSetConv conv;
	    unsigned code;

	    code = Utf8CharGet(conv.Convert(letter));
	    if (code <= 0xFF) {
		cRemote::Put(KBDKEY(code));	// feed it for edit mode
	    }
	}
    } else if (!csoft->Put(key, repeat, release)) {
	cRemote::Put(KBDKEY(key[0]));	// feed it for edit mode
    }
}

//////////////////////////////////////////////////////////////////////////////
//	OSD
//////////////////////////////////////////////////////////////////////////////

/**
**	Soft device plugin OSD class.
*/
class cSoftOsd:public cOsd
{
  public:
    static volatile char Dirty;		///< flag force redraw everything
    int OsdLevel;			///< current osd level FIXME: remove

    cSoftOsd(int, int, uint);		///< osd constructor
    virtual ~ cSoftOsd(void);		///< osd destructor
    /// set the sub-areas to the given areas
    virtual eOsdError SetAreas(const tArea *, int);
    virtual void Flush(void);		///< commits all data to the hardware
    virtual void SetActive(bool);	///< sets OSD to be the active one
};

volatile char cSoftOsd::Dirty;		///< flag force redraw everything

/**
**	Sets this OSD to be the active one.
**
**	@param on	true on, false off
**
**	@note only needed as workaround for text2skin plugin with
**	undrawn areas.
*/
void cSoftOsd::SetActive(bool on)
{
#ifdef OSD_DEBUG
    dsyslog("[softhddev]%s: %d level %d\n", __FUNCTION__, on, OsdLevel);
#endif

    if (Active() == on) {
	return;				// already active, no action
    }
    cOsd::SetActive(on);
    if (on) {
	Dirty = 1;
	// only flush here if there are already bitmaps
	if (GetBitmap(0)) {
	    Flush();
	}
    } else {
	// deactivated: close the hardware OSD
	OsdClose();
    }
}

/**
**	Constructor OSD.
**
**	Initializes the OSD with the given coordinates.
**
**	@param left	x-coordinate of osd on display
**	@param top	y-coordinate of osd on display
**	@param level	level of the osd (smallest is shown)
*/
cSoftOsd::cSoftOsd(int left, int top, uint level)
:cOsd(left, top, level)
{
#ifdef OSD_DEBUG
    /* FIXME: OsdWidth/OsdHeight not correct! */
    dsyslog("[softhddev]%s: %dx%d%+d%+d, %d\n", __FUNCTION__, OsdWidth(),
	OsdHeight(), left, top, level);
#endif

    OsdLevel = level;
}

/**
**	OSD Destructor.
**
**	Shuts down the OSD.
*/
cSoftOsd::~cSoftOsd(void)
{
#ifdef OSD_DEBUG
    dsyslog("[softhddev]%s: level %d\n", __FUNCTION__, OsdLevel);
#endif
    SetActive(false);
    // done by SetActive: OsdClose();

#ifdef USE_YAEPG
    // support yaepghd, video window
    if (vidWin.bpp) {			// restore fullsized video
	int width;
	int height;
	double video_aspect;

	::GetOsdSize(&width, &height, &video_aspect);
	// works osd relative
	::ScaleVideo(0, 0, width, height);
    }
#endif
}

/**
**	Set the sub-areas to the given areas
*/
eOsdError cSoftOsd::SetAreas(const tArea * areas, int n)
{
#ifdef OSD_DEBUG
    dsyslog("[softhddev]%s: %d areas \n", __FUNCTION__, n);
#endif

    // clear old OSD, when new areas are set
    if (!IsTrueColor()) {
	cBitmap *bitmap;
	int i;

	for (i = 0; (bitmap = GetBitmap(i)); i++) {
	    bitmap->Clean();
	}
    }
    if (Active()) {
	VideoOsdClear();
	Dirty = 1;
    }
    return cOsd::SetAreas(areas, n);
}

/**
**	Actually commits all data to the OSD hardware.
*/
void cSoftOsd::Flush(void)
{
    cPixmapMemory *pm;

#ifdef OSD_DEBUG
    dsyslog("[softhddev]%s: level %d active %d\n", __FUNCTION__, OsdLevel,
	Active());
#endif

    if (!Active()) {			// this osd is not active
	return;
    }
#ifdef USE_YAEPG
    // support yaepghd, video window
    if (vidWin.bpp) {
#ifdef OSD_DEBUG
	// NOTE(review): prints vidWin.y2 where y1 might be intended — confirm
	dsyslog("[softhddev]%s: %dx%d%+d%+d\n", __FUNCTION__, vidWin.Width(),
	    vidWin.Height(), vidWin.x1, vidWin.y2);
#endif

	// FIXME: vidWin is OSD relative not video window.
	// FIXME: doesn't work if fixed OSD width != real window width
	// FIXME: solved in VideoSetOutputPosition
	::ScaleVideo(Left() + vidWin.x1, Top() + vidWin.y1, vidWin.Width(),
	    vidWin.Height());
    }
#endif

    //
    //	indexed-color path: convert each dirty bitmap area to ARGB and upload
    //
    if (!IsTrueColor()) {
	cBitmap *bitmap;
	int i;

#ifdef OSD_DEBUG
	static char warned;

	if (!warned) {
	    dsyslog("[softhddev]%s: FIXME: should be truecolor\n",
		__FUNCTION__);
	    warned = 1;
	}
#endif
	// draw all bitmaps
	for (i = 0; (bitmap = GetBitmap(i)); ++i) {
	    uint8_t *argb;
	    int x;
	    int y;
	    int w;
	    int h;
	    int x1;
	    int y1;
	    int x2;
	    int y2;

	    // get dirty bounding box
	    if (Dirty) {		// forced complete update
		x1 = 0;
		y1 = 0;
		x2 = bitmap->Width() - 1;
		y2 = bitmap->Height() - 1;
	    } else if (!bitmap->Dirty(x1, y1, x2, y2)) {
		continue;		// nothing dirty, skip this bitmap
	    }
	    // convert and upload only dirty areas
	    w = x2 - x1 + 1;
	    h = y2 - y1 + 1;
	    if (1) {			// just for the case it makes trouble
		// clamp dirty area to the real OSD size
		int width;
		int height;
		double video_aspect;

		::GetOsdSize(&width, &height, &video_aspect);
		if (w > width) {
		    w = width;
		    x2 = x1 + width - 1;
		}
		if (h > height) {
		    h = height;
		    y2 = y1 + height - 1;
		}
	    }
#ifdef DEBUG
	    if (w > bitmap->Width() || h > bitmap->Height()) {
		esyslog(tr("[softhddev]: dirty area too big\n"));
		abort();
	    }
#endif
	    // temporary ARGB buffer for the dirty rectangle
	    argb = (uint8_t *) malloc(w * h * sizeof(uint32_t));
	    for (y = y1; y <= y2; ++y) {
		for (x = x1; x <= x2; ++x) {
		    ((uint32_t *) argb)[x - x1 + (y - y1) * w] =
			bitmap->GetColor(x, y);
		}
	    }
#ifdef OSD_DEBUG
	    dsyslog("[softhddev]%s: draw %dx%d%+d%+d bm\n", __FUNCTION__, w, h,
		Left() + bitmap->X0() + x1, Top() + bitmap->Y0() + y1);
#endif
	    OsdDrawARGB(Left() + bitmap->X0() + x1, Top() + bitmap->Y0() + y1,
		w, h, argb);

	    bitmap->Clean();
	    // FIXME: reuse argb
	    free(argb);
	}
	Dirty = 0;
	return;
    }
    //
    //	true-color path: upload each rendered pixmap directly
    //
    LOCK_PIXMAPS;
    while ((pm = (dynamic_cast < cPixmapMemory * >(RenderPixmaps())))) {
	int x;
	int y;
	int w;
	int h;

	x = Left() + pm->ViewPort().X();
	y = Top() + pm->ViewPort().Y();
	w = pm->ViewPort().Width();
	h = pm->ViewPort().Height();

#ifdef OSD_DEBUG
	dsyslog("[softhddev]%s: draw %dx%d%+d%+d %p\n", __FUNCTION__, w, h, x,
	    y, pm->Data());
#endif

	OsdDrawARGB(x, y, w, h, pm->Data());

#if APIVERSNUM >= 20110
	DestroyPixmap(pm);
#else
	delete pm;
#endif
    }
    Dirty = 0;
}

//////////////////////////////////////////////////////////////////////////////
//	OSD provider
//////////////////////////////////////////////////////////////////////////////

/**
**	Soft device plugin OSD provider class.
*/
class cSoftOsdProvider:public cOsdProvider
{
  private:
    static cOsd *Osd;			///< single OSD

  public:
    virtual cOsd * CreateOsd(int, int, uint);
    virtual bool ProvidesTrueColor(void);
    cSoftOsdProvider(void);		///< OSD provider constructor
    //virtual ~cSoftOsdProvider();	///< OSD provider destructor
};

cOsd *cSoftOsdProvider::Osd;		///< single osd

/**
**	Create a new OSD.
**
**	@param left	x-coordinate of OSD
**	@param top	y-coordinate of OSD
**	@param level	layer level of OSD
*/
cOsd *cSoftOsdProvider::CreateOsd(int left, int top, uint level)
{
#ifdef OSD_DEBUG
    dsyslog("[softhddev]%s: %d, %d, %d\n", __FUNCTION__, left, top, level);
#endif

    return Osd = new cSoftOsd(left, top, level);
}

/**
**	Check if this OSD provider is able to handle a true color OSD.
**
**	@returns true we are able to handle a true color OSD.
*/
bool cSoftOsdProvider::ProvidesTrueColor(void)
{
    return true;
}

/**
**	Create cOsdProvider class.
*/
cSoftOsdProvider::cSoftOsdProvider(void)
:  cOsdProvider()
{
#ifdef OSD_DEBUG
    dsyslog("[softhddev]%s:\n", __FUNCTION__);
#endif
}

/**
**	Destroy cOsdProvider class.
cSoftOsdProvider::~cSoftOsdProvider() { dsyslog("[softhddev]%s:\n", __FUNCTION__); } */ ////////////////////////////////////////////////////////////////////////////// // cMenuSetupPage ////////////////////////////////////////////////////////////////////////////// /** ** Soft device plugin menu setup page class. */ class cMenuSetupSoft:public cMenuSetupPage { protected: /// /// local copies of global setup variables: /// @{ int General; int MakePrimary; int HideMainMenuEntry; int DetachFromMainMenu; int OsdSize; int OsdWidth; int OsdHeight; int SuspendClose; int SuspendX11; int Video; int Video4to3DisplayFormat; int VideoOtherDisplayFormat; uint32_t Background; uint32_t BackgroundAlpha; int StudioLevels; int _60HzMode; int SoftStartSync; int BlackPicture; int ClearOnSwitch; int Brightness; int Contrast; int Saturation; int Hue; int ResolutionShown[RESOLUTIONS]; int Scaling[RESOLUTIONS]; int Deinterlace[RESOLUTIONS]; int SkipChromaDeinterlace[RESOLUTIONS]; int InverseTelecine[RESOLUTIONS]; int Denoise[RESOLUTIONS]; int Sharpen[RESOLUTIONS]; int CutTopBottom[RESOLUTIONS]; int CutLeftRight[RESOLUTIONS]; int AutoCropInterval; int AutoCropDelay; int AutoCropTolerance; int Audio; int AudioDelay; int AudioDrift; int AudioPassthroughDefault; int AudioPassthroughPCM; int AudioPassthroughAC3; int AudioPassthroughEAC3; int AudioDownmix; int AudioSoftvol; int AudioNormalize; int AudioMaxNormalize; int AudioCompression; int AudioMaxCompression; int AudioStereoDescent; int AudioBufferTime; int AudioAutoAES; #ifdef USE_PIP int Pip; int PipX; int PipY; int PipWidth; int PipHeight; int PipVideoX; int PipVideoY; int PipVideoWidth; int PipVideoHeight; int PipAltX; int PipAltY; int PipAltWidth; int PipAltHeight; int PipAltVideoX; int PipAltVideoY; int PipAltVideoWidth; int PipAltVideoHeight; #endif /// @} private: inline cOsdItem * CollapsedItem(const char *, int &, const char * = NULL); void Create(void); // create sub-menu protected: virtual void Store(void); public: 
cMenuSetupSoft(void); virtual eOSState ProcessKey(eKeys); // handle input }; /** ** Create a seperator item. ** ** @param label text inside separator */ static inline cOsdItem *SeparatorItem(const char *label) { cOsdItem *item; item = new cOsdItem(cString::sprintf("* %s: ", label)); item->SetSelectable(false); return item; } /** ** Create a collapsed item. ** ** @param label text inside collapsed ** @param flag flag handling collapsed or opened ** @param msg open message */ inline cOsdItem *cMenuSetupSoft::CollapsedItem(const char *label, int &flag, const char *msg) { cOsdItem *item; item = new cMenuEditBoolItem(cString::sprintf("* %s", label), &flag, msg ? msg : tr("show"), tr("hide")); return item; } /** ** Create setup menu. */ void cMenuSetupSoft::Create(void) { static const char *const osd_size[] = { "auto", "1920x1080", "1280x720", "custom", }; static const char *const video_display_formats_4_3[] = { "pan&scan", "letterbox", "center cut-out", }; static const char *const video_display_formats_16_9[] = { "pan&scan", "pillarbox", "center cut-out", }; static const char *const deinterlace[] = { "Bob", "Weave/None", "Temporal", "TemporalSpatial", "Software Bob", "Software Spatial", }; static const char *const deinterlace_short[] = { "B", "W", "T", "T+S", "S+B", "S+S", }; static const char *const scaling[] = { "Normal", "Fast", "HQ", "Anamorphic" }; static const char *const scaling_short[] = { "N", "F", "HQ", "A" }; static const char *const audiodrift[] = { "None", "PCM", "AC-3", "PCM + AC-3" }; static const char *const resolution[RESOLUTIONS] = { "576i", "720p", "fake 1080i", "1080i" }; int current; int i; current = Current(); // get current menu item index Clear(); // clear the menu // // general // Add(CollapsedItem(tr("General"), General)); if (General) { Add(new cMenuEditBoolItem(tr("Make primary device"), &MakePrimary, trVDR("no"), trVDR("yes"))); Add(new cMenuEditBoolItem(tr("Hide main menu entry"), &HideMainMenuEntry, trVDR("no"), trVDR("yes"))); // // osd 
// Add(new cMenuEditStraItem(tr("Osd size"), &OsdSize, 4, osd_size)); if (OsdSize == 3) { Add(new cMenuEditIntItem(tr("Osd width"), &OsdWidth, 0, 4096)); Add(new cMenuEditIntItem(tr("Osd height"), &OsdHeight, 0, 4096)); } // // suspend // Add(SeparatorItem(tr("Suspend"))); Add(new cMenuEditBoolItem(tr("Detach from main menu entry"), &DetachFromMainMenu, trVDR("no"), trVDR("yes"))); Add(new cMenuEditBoolItem(tr("Suspend closes video+audio"), &SuspendClose, trVDR("no"), trVDR("yes"))); Add(new cMenuEditBoolItem(tr("Suspend stops x11"), &SuspendX11, trVDR("no"), trVDR("yes"))); } // // video // Add(CollapsedItem(tr("Video"), Video)); if (Video) { Add(new cMenuEditStraItem(trVDR("4:3 video display format"), &Video4to3DisplayFormat, 3, video_display_formats_4_3)); Add(new cMenuEditStraItem(trVDR("16:9+other video display format"), &VideoOtherDisplayFormat, 3, video_display_formats_16_9)); // FIXME: switch config gray/color configuration Add(new cMenuEditIntItem(tr("Video background color (RGB)"), (int *)&Background, 0, 0x00FFFFFF)); Add(new cMenuEditIntItem(tr("Video background color (Alpha)"), (int *)&BackgroundAlpha, 0, 0xFF)); Add(new cMenuEditBoolItem(tr("Use studio levels (vdpau only)"), &StudioLevels, trVDR("no"), trVDR("yes"))); Add(new cMenuEditBoolItem(tr("60hz display mode"), &_60HzMode, trVDR("no"), trVDR("yes"))); Add(new cMenuEditBoolItem(tr("Soft start a/v sync"), &SoftStartSync, trVDR("no"), trVDR("yes"))); Add(new cMenuEditBoolItem(tr("Black during channel switch"), &BlackPicture, trVDR("no"), trVDR("yes"))); Add(new cMenuEditBoolItem(tr("Clear decoder on channel switch"), &ClearOnSwitch, trVDR("no"), trVDR("yes"))); Add(new cMenuEditIntItem(tr("Brightness (-1000..1000) (vdpau)"), &Brightness, -1000, 1000, tr("min"), tr("max"))); Add(new cMenuEditIntItem(tr("Contrast (0..10000) (vdpau)"), &Contrast, 0, 10000, tr("min"), tr("max"))); Add(new cMenuEditIntItem(tr("Saturation (0..10000) (vdpau)"), &Saturation, 0, 10000, tr("min"), tr("max"))); Add(new 
cMenuEditIntItem(tr("Hue (-3141..3141) (vdpau)"), &Hue, -3141, 3141, tr("min"), tr("max"))); for (i = 0; i < RESOLUTIONS; ++i) { cString msg; // short hidden informations msg = cString::sprintf("%s,%s%s%s%s,...", scaling_short[Scaling[i]], deinterlace_short[Deinterlace[i]], SkipChromaDeinterlace[i] ? ",skip" : "", InverseTelecine[i] ? ",ITC" : "", Denoise[i] ? ",DN" : ""); Add(CollapsedItem(resolution[i], ResolutionShown[i], msg)); if (ResolutionShown[i]) { Add(new cMenuEditStraItem(tr("Scaling"), &Scaling[i], 4, scaling)); Add(new cMenuEditStraItem(tr("Deinterlace"), &Deinterlace[i], 6, deinterlace)); Add(new cMenuEditBoolItem(tr("SkipChromaDeinterlace (vdpau)"), &SkipChromaDeinterlace[i], trVDR("no"), trVDR("yes"))); Add(new cMenuEditBoolItem(tr("Inverse Telecine (vdpau)"), &InverseTelecine[i], trVDR("no"), trVDR("yes"))); Add(new cMenuEditIntItem(tr("Denoise (0..1000) (vdpau)"), &Denoise[i], 0, 1000, tr("off"), tr("max"))); Add(new cMenuEditIntItem(tr("Sharpen (-1000..1000) (vdpau)"), &Sharpen[i], -1000, 1000, tr("blur max"), tr("sharpen max"))); Add(new cMenuEditIntItem(tr("Cut top and bottom (pixel)"), &CutTopBottom[i], 0, 250)); Add(new cMenuEditIntItem(tr("Cut left and right (pixel)"), &CutLeftRight[i], 0, 250)); } } // // auto-crop // Add(SeparatorItem(tr("Auto-crop"))); Add(new cMenuEditIntItem(tr("Autocrop interval (frames)"), &AutoCropInterval, 0, 200, tr("off"))); Add(new cMenuEditIntItem(tr("Autocrop delay (n * interval)"), &AutoCropDelay, 0, 200)); Add(new cMenuEditIntItem(tr("Autocrop tolerance (pixel)"), &AutoCropTolerance, 0, 32)); } // // audio // Add(CollapsedItem(tr("Audio"), Audio)); if (Audio) { Add(new cMenuEditIntItem(tr("Audio/Video delay (ms)"), &AudioDelay, -1000, 1000)); Add(new cMenuEditStraItem(tr("Audio drift correction"), &AudioDrift, 4, audiodrift)); Add(new cMenuEditBoolItem(tr("Pass-through default"), &AudioPassthroughDefault, trVDR("off"), trVDR("on"))); Add(new cMenuEditBoolItem(tr("\040\040PCM pass-through"), 
&AudioPassthroughPCM, trVDR("no"), trVDR("yes"))); Add(new cMenuEditBoolItem(tr("\040\040AC-3 pass-through"), &AudioPassthroughAC3, trVDR("no"), trVDR("yes"))); Add(new cMenuEditBoolItem(tr("\040\040E-AC-3 pass-through"), &AudioPassthroughEAC3, trVDR("no"), trVDR("yes"))); Add(new cMenuEditBoolItem(tr("Enable (E-)AC-3 (decoder) downmix"), &AudioDownmix, trVDR("no"), trVDR("yes"))); Add(new cMenuEditBoolItem(tr("Volume control"), &AudioSoftvol, tr("Hardware"), tr("Software"))); Add(new cMenuEditBoolItem(tr("Enable normalize volume"), &AudioNormalize, trVDR("no"), trVDR("yes"))); Add(new cMenuEditIntItem(tr(" Max normalize factor (/1000)"), &AudioMaxNormalize, 0, 10000)); Add(new cMenuEditBoolItem(tr("Enable volume compression"), &AudioCompression, trVDR("no"), trVDR("yes"))); Add(new cMenuEditIntItem(tr(" Max compression factor (/1000)"), &AudioMaxCompression, 0, 10000)); Add(new cMenuEditIntItem(tr("Reduce stereo volume (/1000)"), &AudioStereoDescent, 0, 1000)); Add(new cMenuEditIntItem(tr("Audio buffer size (ms)"), &AudioBufferTime, 0, 1000)); Add(new cMenuEditBoolItem(tr("Enable automatic AES"), &AudioAutoAES, trVDR("no"), trVDR("yes"))); } #ifdef USE_PIP // // PIP // Add(CollapsedItem(tr("Picture-In-Picture"), Pip)); if (Pip) { // FIXME: predefined modes/custom mode Add(new cMenuEditIntItem(tr("Pip X (%)"), &PipX, 0, 100)); Add(new cMenuEditIntItem(tr("Pip Y (%)"), &PipY, 0, 100)); Add(new cMenuEditIntItem(tr("Pip Width (%)"), &PipWidth, 0, 100)); Add(new cMenuEditIntItem(tr("Pip Height (%)"), &PipHeight, 0, 100)); Add(new cMenuEditIntItem(tr("Video X (%)"), &PipVideoX, 0, 100)); Add(new cMenuEditIntItem(tr("Video Y (%)"), &PipVideoY, 0, 100)); Add(new cMenuEditIntItem(tr("Video Width (%)"), &PipVideoWidth, 0, 100)); Add(new cMenuEditIntItem(tr("Video Height (%)"), &PipVideoHeight, 0, 100)); Add(new cMenuEditIntItem(tr("Alternative Pip X (%)"), &PipAltX, 0, 100)); Add(new cMenuEditIntItem(tr("Alternative Pip Y (%)"), &PipAltY, 0, 100)); Add(new 
cMenuEditIntItem(tr("Alternative Pip Width (%)"), &PipAltWidth, 0, 100));
	Add(new cMenuEditIntItem(tr("Alternative Pip Height (%)"),
		&PipAltHeight, 0, 100));
	Add(new cMenuEditIntItem(tr("Alternative Video X (%)"), &PipAltVideoX,
		0, 100));
	Add(new cMenuEditIntItem(tr("Alternative Video Y (%)"), &PipAltVideoY,
		0, 100));
	Add(new cMenuEditIntItem(tr("Alternative Video Width (%)"),
		&PipAltVideoWidth, 0, 100));
	Add(new cMenuEditIntItem(tr("Alternative Video Height (%)"),
		&PipAltVideoHeight, 0, 100));
    }
#endif

    SetCurrent(Get(current));		// restore selected menu entry
    Display();				// display the rebuilt menu
}

/**
**	Process key for setup menu.
**
**	Rebuilds the menu when a collapse/expand flag or the OSD size
**	selection changed, otherwise leaves it untouched (rebuilds are slow).
**
**	@param key	input key event
*/
eOSState cMenuSetupSoft::ProcessKey(eKeys key)
{
    eOSState state;
    int old_general;
    int old_video;
    int old_audio;

#ifdef USE_PIP
    int old_pip;
#endif
    int old_osd_size;
    int old_resolution_shown[RESOLUTIONS];
    int i;

    // remember the collapse/expand state before the key is handled
    old_general = General;
    old_video = Video;
    old_audio = Audio;
#ifdef USE_PIP
    old_pip = Pip;
#endif
    old_osd_size = OsdSize;
    memcpy(old_resolution_shown, ResolutionShown, sizeof(ResolutionShown));
    state = cMenuSetupPage::ProcessKey(key);

    if (key != kNone) {
	// update menu only, if something on the structure has changed
	// this is needed because VDR menus are evil slow
	if (old_general != General || old_video != Video
	    || old_audio != Audio
#ifdef USE_PIP
	    || old_pip != Pip
#endif
	    || old_osd_size != OsdSize) {
	    Create();			// update menu
	} else {
	    for (i = 0; i < RESOLUTIONS; ++i) {
		if (old_resolution_shown[i] != ResolutionShown[i]) {
		    Create();		// update menu
		    break;
		}
	    }
	}
    }

    return state;
}

/**
**	Constructor setup menu.
**
**	Import global config variables into setup.
*/
cMenuSetupSoft::cMenuSetupSoft(void)
{
    int i;

    //
    //	general
    //
    General = 0;			// section starts collapsed
    MakePrimary = ConfigMakePrimary;
    HideMainMenuEntry = ConfigHideMainMenuEntry;
    DetachFromMainMenu = ConfigDetachFromMainMenu;
    //
    //	osd
    //
    // map the stored width/height onto the osd_size selector
    // (0=auto, 1=1920x1080, 2=1280x720, 3=custom)
    OsdWidth = ConfigOsdWidth;
    OsdHeight = ConfigOsdHeight;
    if (!OsdWidth && !OsdHeight) {
	OsdSize = 0;
    } else if (OsdWidth == 1920 && OsdHeight == 1080) {
	OsdSize = 1;
    } else if (OsdWidth == 1280 && OsdHeight == 720) {
	OsdSize = 2;
    } else {
	OsdSize = 3;
    }
    //
    //	suspend
    //
    SuspendClose = ConfigSuspendClose;
    SuspendX11 = ConfigSuspendX11;
    //
    //	video
    //
    Video = 0;				// section starts collapsed
    Video4to3DisplayFormat = Config4to3DisplayFormat;
    VideoOtherDisplayFormat = ConfigOtherDisplayFormat;
    // no unsigned int menu item supported, split background color/alpha
    Background = ConfigVideoBackground >> 8;
    BackgroundAlpha = ConfigVideoBackground & 0xFF;
    StudioLevels = ConfigVideoStudioLevels;
    _60HzMode = ConfigVideo60HzMode;
    SoftStartSync = ConfigVideoSoftStartSync;
    BlackPicture = ConfigVideoBlackPicture;
    ClearOnSwitch = ConfigVideoClearOnSwitch;
    Brightness = ConfigVideoBrightness;
    Contrast = ConfigVideoContrast;
    Saturation = ConfigVideoSaturation;
    Hue = ConfigVideoHue;
    for (i = 0; i < RESOLUTIONS; ++i) {
	ResolutionShown[i] = 0;		// per-resolution group collapsed
	Scaling[i] = ConfigVideoScaling[i];
	Deinterlace[i] = ConfigVideoDeinterlace[i];
	SkipChromaDeinterlace[i] = ConfigVideoSkipChromaDeinterlace[i];
	InverseTelecine[i] = ConfigVideoInverseTelecine[i];
	Denoise[i] = ConfigVideoDenoise[i];
	Sharpen[i] = ConfigVideoSharpen[i];
	CutTopBottom[i] = ConfigVideoCutTopBottom[i];
	CutLeftRight[i] = ConfigVideoCutLeftRight[i];
    }
    //
    //	auto-crop
    //
    AutoCropInterval = ConfigAutoCropInterval;
    AutoCropDelay = ConfigAutoCropDelay;
    AutoCropTolerance = ConfigAutoCropTolerance;
    //
    //	audio
    //
    Audio = 0;				// section starts collapsed
    AudioDelay = ConfigVideoAudioDelay;
    AudioDrift = ConfigAudioDrift;
    // the default shown is the current runtime state, not the stored config
    AudioPassthroughDefault = AudioPassthroughState;
    AudioPassthroughPCM = ConfigAudioPassthrough & CodecPCM;
    AudioPassthroughAC3 = ConfigAudioPassthrough & CodecAC3;
    AudioPassthroughEAC3 = ConfigAudioPassthrough & CodecEAC3;
    AudioDownmix = ConfigAudioDownmix;
    AudioSoftvol = ConfigAudioSoftvol;
    AudioNormalize = ConfigAudioNormalize;
    AudioMaxNormalize = ConfigAudioMaxNormalize;
    AudioCompression = ConfigAudioCompression;
    AudioMaxCompression = ConfigAudioMaxCompression;
    AudioStereoDescent = ConfigAudioStereoDescent;
    AudioBufferTime = ConfigAudioBufferTime;
    AudioAutoAES = ConfigAudioAutoAES;
#ifdef USE_PIP
    //
    //	PIP
    //
    Pip = 0;				// section starts collapsed
    PipX = ConfigPipX;
    PipY = ConfigPipY;
    PipWidth = ConfigPipWidth;
    PipHeight = ConfigPipHeight;
    PipVideoX = ConfigPipVideoX;
    PipVideoY = ConfigPipVideoY;
    PipVideoWidth = ConfigPipVideoWidth;
    PipVideoHeight = ConfigPipVideoHeight;
    PipAltX = ConfigPipAltX;
    PipAltY = ConfigPipAltY;
    PipAltWidth = ConfigPipAltWidth;
    PipAltHeight = ConfigPipAltHeight;
    PipAltVideoX = ConfigPipAltVideoX;
    PipAltVideoY = ConfigPipAltVideoY;
    PipAltVideoWidth = ConfigPipAltVideoWidth;
    PipAltVideoHeight = ConfigPipAltVideoHeight;
#endif

    Create();
}

/**
**	Store setup.
*/
void cMenuSetupSoft::Store(void)
{
    int i;

    SetupStore("MakePrimary", ConfigMakePrimary = MakePrimary);
    SetupStore("HideMainMenuEntry", ConfigHideMainMenuEntry =
	HideMainMenuEntry);
    SetupStore("DetachFromMainMenu", ConfigDetachFromMainMenu =
	DetachFromMainMenu);
    // translate the osd_size selector back into concrete width/height
    switch (OsdSize) {
	case 0:
	    OsdWidth = 0;
	    OsdHeight = 0;
	    break;
	case 1:
	    OsdWidth = 1920;
	    OsdHeight = 1080;
	    break;
	case 2:
	    OsdWidth = 1280;
	    OsdHeight = 720;
	    // no break: falls into default, which only breaks (harmless)
	default:
	    break;
    }
    if (ConfigOsdWidth != OsdWidth || ConfigOsdHeight != OsdHeight) {
	VideoSetOsdSize(ConfigOsdWidth = OsdWidth, ConfigOsdHeight =
	    OsdHeight);
	// FIXME: shown osd size not updated
    }
    SetupStore("Osd.Width", ConfigOsdWidth);
    SetupStore("Osd.Height", ConfigOsdHeight);

    SetupStore("Suspend.Close", ConfigSuspendClose = SuspendClose);
    SetupStore("Suspend.X11", ConfigSuspendX11 = SuspendX11);

    SetupStore("Video4to3DisplayFormat", Config4to3DisplayFormat =
	Video4to3DisplayFormat);
    VideoSet4to3DisplayFormat(Config4to3DisplayFormat);
    SetupStore("VideoOtherDisplayFormat", ConfigOtherDisplayFormat =
	VideoOtherDisplayFormat);
    VideoSetOtherDisplayFormat(ConfigOtherDisplayFormat);

    // recombine the split color/alpha menu items into one RGBA value
    ConfigVideoBackground = Background << 8 | (BackgroundAlpha & 0xFF);
    SetupStore("Background", ConfigVideoBackground);
    VideoSetBackground(ConfigVideoBackground);
    SetupStore("StudioLevels", ConfigVideoStudioLevels = StudioLevels);
    VideoSetStudioLevels(ConfigVideoStudioLevels);
    SetupStore("60HzMode", ConfigVideo60HzMode = _60HzMode);
    VideoSet60HzMode(ConfigVideo60HzMode);
    SetupStore("SoftStartSync", ConfigVideoSoftStartSync = SoftStartSync);
    VideoSetSoftStartSync(ConfigVideoSoftStartSync);
    SetupStore("BlackPicture", ConfigVideoBlackPicture = BlackPicture);
    VideoSetBlackPicture(ConfigVideoBlackPicture);
    SetupStore("ClearOnSwitch", ConfigVideoClearOnSwitch = ClearOnSwitch);

    SetupStore("Brightness", ConfigVideoBrightness = Brightness);
    VideoSetBrightness(ConfigVideoBrightness);
    SetupStore("Contrast", ConfigVideoContrast = Contrast);
    VideoSetContrast(ConfigVideoContrast);
    SetupStore("Saturation", ConfigVideoSaturation = Saturation);
    VideoSetSaturation(ConfigVideoSaturation);
    SetupStore("Hue", ConfigVideoHue = Hue);
    VideoSetHue(ConfigVideoHue);
    // per-resolution settings are stored under "<resolution>.<name>" keys
    for (i = 0; i < RESOLUTIONS; ++i) {
	char buf[128];

	snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "Scaling");
	SetupStore(buf, ConfigVideoScaling[i] = Scaling[i]);
	snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "Deinterlace");
	SetupStore(buf, ConfigVideoDeinterlace[i] = Deinterlace[i]);
	snprintf(buf, sizeof(buf), "%s.%s", Resolution[i],
	    "SkipChromaDeinterlace");
	SetupStore(buf, ConfigVideoSkipChromaDeinterlace[i] =
	    SkipChromaDeinterlace[i]);
	snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "InverseTelecine");
	SetupStore(buf, ConfigVideoInverseTelecine[i] = InverseTelecine[i]);
	snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "Denoise");
	SetupStore(buf, ConfigVideoDenoise[i] = Denoise[i]);
	snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "Sharpen");
	SetupStore(buf, ConfigVideoSharpen[i] = Sharpen[i]);
	snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "CutTopBottom");
	SetupStore(buf, ConfigVideoCutTopBottom[i] = CutTopBottom[i]);
	snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "CutLeftRight");
	SetupStore(buf, ConfigVideoCutLeftRight[i] = CutLeftRight[i]);
    }
    VideoSetScaling(ConfigVideoScaling);
    VideoSetDeinterlace(ConfigVideoDeinterlace);
    VideoSetSkipChromaDeinterlace(ConfigVideoSkipChromaDeinterlace);
    VideoSetInverseTelecine(ConfigVideoInverseTelecine);
    VideoSetDenoise(ConfigVideoDenoise);
    VideoSetSharpen(ConfigVideoSharpen);
    VideoSetCutTopBottom(ConfigVideoCutTopBottom);
    VideoSetCutLeftRight(ConfigVideoCutLeftRight);

    SetupStore("AutoCrop.Interval", ConfigAutoCropInterval = AutoCropInterval);
    SetupStore("AutoCrop.Delay", ConfigAutoCropDelay = AutoCropDelay);
    SetupStore("AutoCrop.Tolerance", ConfigAutoCropTolerance =
	AutoCropTolerance);
    VideoSetAutoCrop(ConfigAutoCropInterval, ConfigAutoCropDelay,
	ConfigAutoCropTolerance);
    ConfigAutoCropEnabled = ConfigAutoCropInterval != 0;

    SetupStore("AudioDelay", ConfigVideoAudioDelay = AudioDelay);
    VideoSetAudioDelay(ConfigVideoAudioDelay);
    SetupStore("AudioDrift", ConfigAudioDrift = AudioDrift);
    CodecSetAudioDrift(ConfigAudioDrift);
    // pack the per-codec booleans into the pass-through bitmask;
    // a negative stored value means "configured but currently off"
    ConfigAudioPassthrough = (AudioPassthroughPCM ? CodecPCM : 0)
	| (AudioPassthroughAC3 ? CodecAC3 : 0)
	| (AudioPassthroughEAC3 ? CodecEAC3 : 0);
    AudioPassthroughState = AudioPassthroughDefault;
    if (AudioPassthroughState) {
	SetupStore("AudioPassthrough", ConfigAudioPassthrough);
	CodecSetAudioPassthrough(ConfigAudioPassthrough);
    } else {
	SetupStore("AudioPassthrough", -ConfigAudioPassthrough);
	CodecSetAudioPassthrough(0);
    }
    SetupStore("AudioDownmix", ConfigAudioDownmix = AudioDownmix);
    CodecSetAudioDownmix(ConfigAudioDownmix);
    SetupStore("AudioSoftvol", ConfigAudioSoftvol = AudioSoftvol);
    AudioSetSoftvol(ConfigAudioSoftvol);
    SetupStore("AudioNormalize", ConfigAudioNormalize = AudioNormalize);
    SetupStore("AudioMaxNormalize", ConfigAudioMaxNormalize =
	AudioMaxNormalize);
    AudioSetNormalize(ConfigAudioNormalize, ConfigAudioMaxNormalize);
    SetupStore("AudioCompression", ConfigAudioCompression = AudioCompression);
    SetupStore("AudioMaxCompression", ConfigAudioMaxCompression =
	AudioMaxCompression);
    AudioSetCompression(ConfigAudioCompression, ConfigAudioMaxCompression);
    SetupStore("AudioStereoDescent", ConfigAudioStereoDescent =
	AudioStereoDescent);
    AudioSetStereoDescent(ConfigAudioStereoDescent);
    SetupStore("AudioBufferTime", ConfigAudioBufferTime = AudioBufferTime);
    SetupStore("AudioAutoAES", ConfigAudioAutoAES = AudioAutoAES);
    AudioSetAutoAES(ConfigAudioAutoAES);
#ifdef USE_PIP
    SetupStore("pip.X", ConfigPipX = PipX);
    SetupStore("pip.Y", ConfigPipY = PipY);
    SetupStore("pip.Width", ConfigPipWidth = PipWidth);
    SetupStore("pip.Height", ConfigPipHeight = PipHeight);
    SetupStore("pip.VideoX", ConfigPipVideoX = PipVideoX);
    SetupStore("pip.VideoY", ConfigPipVideoY = PipVideoY);
    SetupStore("pip.VideoWidth", ConfigPipVideoWidth = PipVideoWidth);
    SetupStore("pip.VideoHeight",
ConfigPipVideoHeight = PipVideoHeight); SetupStore("pip.Alt.X", ConfigPipAltX = PipAltX); SetupStore("pip.Alt.Y", ConfigPipAltY = PipAltY); SetupStore("pip.Alt.Width", ConfigPipAltWidth = PipAltWidth); SetupStore("pip.Alt.Height", ConfigPipAltHeight = PipAltHeight); SetupStore("pip.Alt.VideoX", ConfigPipAltVideoX = PipAltVideoX); SetupStore("pip.Alt.VideoY", ConfigPipAltVideoY = PipAltVideoY); SetupStore("pip.Alt.VideoWidth", ConfigPipAltVideoWidth = PipAltVideoWidth); SetupStore("pip.Alt.VideoHeight", ConfigPipAltVideoHeight = PipAltVideoHeight); #endif } ////////////////////////////////////////////////////////////////////////////// // cPlayer ////////////////////////////////////////////////////////////////////////////// /** ** Dummy player for suspend mode. */ class cSoftHdPlayer:public cPlayer { protected: public: cSoftHdPlayer(void); virtual ~ cSoftHdPlayer(); }; cSoftHdPlayer::cSoftHdPlayer(void) { } cSoftHdPlayer::~cSoftHdPlayer() { Detach(); } ////////////////////////////////////////////////////////////////////////////// // cControl ////////////////////////////////////////////////////////////////////////////// /** ** Dummy control class for suspend mode. */ class cSoftHdControl:public cControl { public: static cSoftHdPlayer *Player; ///< dummy player virtual void Hide(void) ///< hide control { } virtual eOSState ProcessKey(eKeys); ///< process input events cSoftHdControl(void); ///< control constructor virtual ~ cSoftHdControl(); ///< control destructor }; cSoftHdPlayer *cSoftHdControl::Player; ///< dummy player instance /** ** Handle a key event. ** ** @param key key pressed */ eOSState cSoftHdControl::ProcessKey(eKeys key) { if (SuspendMode == SUSPEND_NORMAL && (!ISMODELESSKEY(key) || key == kMenu || key == kBack || key == kStop)) { delete Player; Player = NULL; Resume(); SuspendMode = NOT_SUSPENDED; return osEnd; } return osContinue; } /** ** Player control constructor. 
*/ cSoftHdControl::cSoftHdControl(void) : cControl(Player = new cSoftHdPlayer) { } /** ** Player control destructor. */ cSoftHdControl::~cSoftHdControl() { delete Player; Player = NULL; // loose control resume if (SuspendMode == SUSPEND_NORMAL) { Resume(); SuspendMode = NOT_SUSPENDED; } dsyslog("[softhddev]%s: dummy player stopped\n", __FUNCTION__); } ////////////////////////////////////////////////////////////////////////////// // PIP ////////////////////////////////////////////////////////////////////////////// #ifdef USE_PIP extern "C" void DelPip(void); ///< remove PIP static int PipAltPosition; ///< flag alternative position ////////////////////////////////////////////////////////////////////////////// // cReceiver ////////////////////////////////////////////////////////////////////////////// #include /** ** Receiver class for PIP mode. */ class cSoftReceiver:public cReceiver { protected: virtual void Activate(bool); virtual void Receive(uchar *, int); public: cSoftReceiver(const cChannel *); ///< receiver constructor virtual ~ cSoftReceiver(); ///< receiver destructor }; /** ** Receiver constructor. ** ** @param channel channel to receive */ cSoftReceiver::cSoftReceiver(const cChannel * channel):cReceiver(NULL, MINPRIORITY) { // cReceiver::channelID not setup, this can cause trouble // we want video only AddPid(channel->Vpid()); } /** ** Receiver destructor. */ cSoftReceiver::~cSoftReceiver() { Detach(); } /** ** Called before the receiver gets attached or detached. ** ** @param on flag attached, detached */ void cSoftReceiver::Activate(bool on) { if (on) { int width; int height; double video_aspect; GetOsdSize(&width, &height, &video_aspect); if (PipAltPosition) { PipStart((ConfigPipAltVideoX * width) / 100, (ConfigPipAltVideoY * height) / 100, ConfigPipAltVideoWidth ? (ConfigPipAltVideoWidth * width) / 100 : width, ConfigPipAltVideoHeight ? 
(ConfigPipAltVideoHeight * height) / 100 : height, (ConfigPipAltX * width) / 100, (ConfigPipAltY * height) / 100, ConfigPipAltWidth ? (ConfigPipAltWidth * width) / 100 : width, ConfigPipAltHeight ? (ConfigPipAltHeight * height) / 100 : height); } else { PipStart((ConfigPipVideoX * width) / 100, (ConfigPipVideoY * height) / 100, ConfigPipVideoWidth ? (ConfigPipVideoWidth * width) / 100 : width, ConfigPipVideoHeight ? (ConfigPipVideoHeight * height) / 100 : height, (ConfigPipX * width) / 100, (ConfigPipY * height) / 100, ConfigPipWidth ? (ConfigPipWidth * width) / 100 : width, ConfigPipHeight ? (ConfigPipHeight * height) / 100 : height); } } else { PipStop(); } } /// /// Parse packetized elementary stream. /// /// @param data payload data of transport stream /// @param size number of payload data bytes /// @param is_start flag, start of pes packet /// static void PipPesParse(const uint8_t * data, int size, int is_start) { static uint8_t *pes_buf; static int pes_size; static int pes_index; // FIXME: quick&dirty if (!pes_buf) { pes_size = 500 * 1024 * 1024; pes_buf = (uint8_t *) malloc(pes_size); if (!pes_buf) { // out of memory, should never happen return; } pes_index = 0; } if (is_start) { // start of pes packet if (pes_index) { if (0) { fprintf(stderr, "pip: PES packet %8d %02x%02x\n", pes_index, pes_buf[2], pes_buf[3]); } if (pes_buf[0] || pes_buf[1] || pes_buf[2] != 0x01) { // FIXME: first should always fail esyslog(tr("[softhddev]pip: invalid PES packet %d\n"), pes_index); } else { PipPlayVideo(pes_buf, pes_index); // FIXME: buffer full: pes packet is dropped } pes_index = 0; } } if (pes_index + size > pes_size) { esyslog(tr("[softhddev]pip: pes buffer too small\n")); pes_size *= 2; if (pes_index + size > pes_size) { pes_size = (pes_index + size) * 2; } pes_buf = (uint8_t *) realloc(pes_buf, pes_size); if (!pes_buf) { // out of memory, should never happen return; } } memcpy(pes_buf + pes_index, data, size); pes_index += size; } /// Transport stream packet size 
#define TS_PACKET_SIZE	188
    /// Transport stream packet sync byte
#define TS_PACKET_SYNC	0x47

/**
**	Receive TS packet from device.
**
**	Demultiplexes the transport stream and feeds the PES payload of
**	the (single, video) PID to PipPesParse().
**
**	@param data	ts packet
**	@param size	size (#TS_PACKET_SIZE=188) of ts packet
*/
void cSoftReceiver::Receive(uchar * data, int size)
{
    const uint8_t *p;

    p = data;
    while (size >= TS_PACKET_SIZE) {
	int payload;

	if (p[0] != TS_PACKET_SYNC) {
	    esyslog(tr("[softhddev]tsdemux: transport stream out of sync\n"));
	    // FIXME: kill all buffers
	    return;
	}
	if (p[1] & 0x80) {		// transport error indicator set
	    dsyslog("[softhddev]tsdemux: transport error\n");
	    // FIXME: kill all buffers
	    goto next_packet;
	}
	if (0) {			// disabled debug dump of PID/flags
	    int pid;

	    pid = (p[1] & 0x1F) << 8 | p[2];
	    fprintf(stderr, "tsdemux: PID: %#04x%s%s\n", pid,
		p[1] & 0x40 ? " start" : "", p[3] & 0x10 ? " payload" : "");
	}
	// skip adaptation field
	switch (p[3] & 0x30) {		// adaption field
	    case 0x00:			// reserved
	    case 0x20:			// adaptation field only
	    default:
		goto next_packet;
	    case 0x10:			// only payload
		payload = 4;
		break;
	    case 0x30:			// skip adapation field
		// payload starts after the 4 byte header, the 1 byte
		// adaptation_field_length and the field itself
		payload = 5 + p[4];
		// illegal length, ignore packet
		if (payload >= TS_PACKET_SIZE) {
		    dsyslog
			("[softhddev]tsdemux: illegal adaption field length\n");
		    goto next_packet;
		}
		break;
	}

	// p[1] & 0x40 = payload_unit_start_indicator
	PipPesParse(p + payload, TS_PACKET_SIZE - payload, p[1] & 0x40);

      next_packet:
	p += TS_PACKET_SIZE;
	size -= TS_PACKET_SIZE;
    }
}

//////////////////////////////////////////////////////////////////////////////

static cSoftReceiver *PipReceiver;	///< PIP receiver
static int PipChannelNr;		///< last PIP channel number
static const cChannel *PipChannel;	///< current PIP channel

/**
**	Stop PIP.
*/
extern "C" void DelPip(void)
{
    delete PipReceiver;

    PipReceiver = NULL;
    PipChannel = NULL;
}

/**
**	Prepare new PIP.
**
**	Replaces any running PIP: finds a free device for the channel,
**	switches it and attaches a new receiver.
**
**	@param channel_nr	channel number (0 = current channel)
*/
static void NewPip(int channel_nr)
{
    const cChannel *channel;
    cDevice *device;
    cSoftReceiver *receiver;

#ifdef DEBUG
    // is device replaying?
    if (cDevice::PrimaryDevice()->Replaying() && cControl::Control()) {
	dsyslog("[softhddev]%s: replay active\n", __FUNCTION__);
	// FIXME: need to find PID
    }
#endif

    if (!channel_nr) {
	channel_nr = cDevice::CurrentChannel();
    }
    if (channel_nr && (channel = Channels.GetByNumber(channel_nr))
	&& (device = cDevice::GetDevice(channel, 0, false, false))) {

	DelPip();

	device->SwitchChannel(channel, false);
	receiver = new cSoftReceiver(channel);
	device->AttachReceiver(receiver);
	PipReceiver = receiver;
	PipChannel = channel;
	PipChannelNr = channel_nr;
    }
}

/**
**	Toggle PIP on/off.
*/
static void TogglePip(void)
{
    if (PipReceiver) {
	int attached;

	attached = PipReceiver->IsAttached();
	DelPip();
	if (attached) {			// turn off only if last PIP was on
	    return;
	}
    }
    NewPip(PipChannelNr);
}

/**
**	Switch PIP to next available channel.
**
**	Walks the channel list (wrapping if configured) until a device
**	can provide the channel; shows an error message if none can.
**
**	@param direction	direction of channel switch
*/
static void PipNextAvailableChannel(int direction)
{
    const cChannel *channel;
    const cChannel *first;

    channel = PipChannel;
    first = channel;

    DelPip();				// disable PIP to free the device

    while (channel) {
	bool ndr;
	cDevice *device;

	channel = direction > 0 ? Channels.Next(channel)
	    : Channels.Prev(channel);
	if (!channel && Setup.ChannelsWrap) {
	    channel = direction > 0 ? Channels.First() : Channels.Last();
	}
	if (channel && !channel->GroupSep()
	    && (device = cDevice::GetDevice(channel, 0, false, true))
	    && device->ProvidesChannel(channel, 0, &ndr) && !ndr) {

	    NewPip(channel->Number());
	    return;
	}
	if (channel == first) {		// wrapped around without a hit
	    Skins.Message(mtError, tr("Channel not available!"));
	    break;
	}
    }
}

/**
**	Swap PIP channels.
**
**	The PIP channel becomes the live channel and vice versa.
*/
static void SwapPipChannels(void)
{
    const cChannel *channel;

    channel = PipChannel;

    DelPip();
    NewPip(0);

    if (channel) {
	Channels.SwitchTo(channel->Number());
    }
}

/**
**	Swap PIP position.
*/
static void SwapPipPosition(void)
{
    int width;
    int height;
    double video_aspect;

    PipAltPosition ^= 1;		// toggle normal/alternative layout
    if (!PipReceiver) {			// no PIP visible, no update needed
	return;
    }

    GetOsdSize(&width, &height, &video_aspect);
    // geometry is configured in percent of the OSD size; 0 = full size
    if (PipAltPosition) {
	PipSetPosition((ConfigPipAltVideoX * width) / 100,
	    (ConfigPipAltVideoY * height) / 100,
	    ConfigPipAltVideoWidth ? (ConfigPipAltVideoWidth * width) /
	    100 : width,
	    ConfigPipAltVideoHeight ? (ConfigPipAltVideoHeight * height) /
	    100 : height, (ConfigPipAltX * width) / 100,
	    (ConfigPipAltY * height) / 100,
	    ConfigPipAltWidth ? (ConfigPipAltWidth * width) / 100 : width,
	    ConfigPipAltHeight ? (ConfigPipAltHeight * height) /
	    100 : height);
    } else {
	PipSetPosition((ConfigPipVideoX * width) / 100,
	    (ConfigPipVideoY * height) / 100,
	    ConfigPipVideoWidth ? (ConfigPipVideoWidth * width) / 100 : width,
	    ConfigPipVideoHeight ? (ConfigPipVideoHeight * height) /
	    100 : height, (ConfigPipX * width) / 100,
	    (ConfigPipY * height) / 100,
	    ConfigPipWidth ? (ConfigPipWidth * width) / 100 : width,
	    ConfigPipHeight ? (ConfigPipHeight * height) / 100 : height);
    }
}

#endif

//////////////////////////////////////////////////////////////////////////////
//	cOsdMenu
//////////////////////////////////////////////////////////////////////////////

/**
**	Hotkey parsing state machine.
*/
typedef enum
{
    HksInitial,				///< initial state
    HksBlue,				///< blue button pressed
    HksBlue1,				///< blue and 1 number pressed
    HksRed,				///< red button pressed
} HkState;

/**
**	Soft device plugin menu class.
*/
class cSoftHdMenu:public cOsdMenu
{
  private:
    HkState HotkeyState;		///< current hot-key state
    int HotkeyCode;			///< current hot-key code
    void Create(void);			///< create plugin main menu
  public:
    cSoftHdMenu(const char *, int = 0, int = 0, int = 0, int = 0, int = 0);
    virtual ~ cSoftHdMenu();
    virtual eOSState ProcessKey(eKeys);
};

/**
**	Create main menu.
**
**	(Re)builds all menu items and the frame statistics line, keeping
**	the currently selected entry.
*/
void cSoftHdMenu::Create(void)
{
    int current;
    int missed;
    int duped;
    int dropped;
    int counter;

    current = Current();		// get current menu item index
    Clear();				// clear the menu

    SetHasHotkeys();
    if (ConfigDetachFromMainMenu) {
	Add(new cOsdItem(hk(tr("Detach SoftHdDevice")), osUser1));
    } else {
	Add(new cOsdItem(hk(tr("Suspend SoftHdDevice")), osUser1));
    }
#ifdef USE_PIP
    if (PipReceiver) {
	Add(new cOsdItem(hk(tr("PIP toggle on/off: off")), osUser2));
    } else {
	Add(new cOsdItem(hk(tr("PIP toggle on/off: on")), osUser2));
    }
    Add(new cOsdItem(hk(tr("PIP zapmode (not working)")), osUser3));
    Add(new cOsdItem(hk(tr("PIP channel +")), osUser4));
    Add(new cOsdItem(hk(tr("PIP channel -")), osUser5));
    if (PipReceiver) {
	Add(new cOsdItem(hk(tr("PIP on/swap channels: swap")), osUser6));
    } else {
	Add(new cOsdItem(hk(tr("PIP on/swap channels: on")), osUser6));
    }
    if (PipAltPosition) {
	Add(new cOsdItem(hk(tr("PIP swap position: normal")), osUser7));
    } else {
	Add(new cOsdItem(hk(tr("PIP swap position: alternative")), osUser7));
    }
    Add(new cOsdItem(hk(tr("PIP close")), osUser8));
#endif
    Add(new cOsdItem(NULL, osUnknown, false));
    Add(new cOsdItem(NULL, osUnknown, false));

    GetStats(&missed, &duped, &dropped, &counter);
    Add(new
	cOsdItem(cString::
	    sprintf(tr(" Frames missed(%d) duped(%d) dropped(%d) total(%d)"),
		missed, duped, dropped, counter), osUnknown, false));

    SetCurrent(Get(current));		// restore selected menu entry
    Display();				// display build menu
}

/**
**	Soft device menu constructor.
*/
cSoftHdMenu::cSoftHdMenu(const char *title, int c0, int c1, int c2, int c3,
    int c4)
:cOsdMenu(title, c0, c1, c2, c3, c4)
{
    HotkeyState = HksInitial;

    Create();
}

/**
**	Soft device menu destructor.
*/
cSoftHdMenu::~cSoftHdMenu()
{
}

/**
**	Handle hot key commands.
**
**	@param code	numeric hot key code
*/
static void HandleHotkey(int code)
{
    switch (code) {
	case 10:			// disable pass-through
	    AudioPassthroughState = 0;
	    CodecSetAudioPassthrough(0);
	    Skins.QueueMessage(mtInfo, tr("pass-through disabled"));
	    break;
	case 11:			// enable pass-through
	    // note: you can't enable, without configured pass-through
	    AudioPassthroughState = 1;
	    CodecSetAudioPassthrough(ConfigAudioPassthrough);
	    Skins.QueueMessage(mtInfo, tr("pass-through enabled"));
	    break;
	case 12:			// toggle pass-through
	    AudioPassthroughState ^= 1;
	    if (AudioPassthroughState) {
		CodecSetAudioPassthrough(ConfigAudioPassthrough);
		Skins.QueueMessage(mtInfo, tr("pass-through enabled"));
	    } else {
		CodecSetAudioPassthrough(0);
		Skins.QueueMessage(mtInfo, tr("pass-through disabled"));
	    }
	    break;
	case 13:			// decrease audio delay
	    ConfigVideoAudioDelay -= 10;
	    VideoSetAudioDelay(ConfigVideoAudioDelay);
	    Skins.QueueMessage(mtInfo,
		cString::sprintf(tr("audio delay changed to %d"),
		    ConfigVideoAudioDelay));
	    break;
	case 14:			// increase audio delay
	    ConfigVideoAudioDelay += 10;
	    VideoSetAudioDelay(ConfigVideoAudioDelay);
	    Skins.QueueMessage(mtInfo,
		cString::sprintf(tr("audio delay changed to %d"),
		    ConfigVideoAudioDelay));
	    break;
	case 15:			// toggle surround downmix
	    ConfigAudioDownmix ^= 1;
	    fprintf(stderr, "toggle downmix\n");	// leftover debug output
	    CodecSetAudioDownmix(ConfigAudioDownmix);
	    if (ConfigAudioDownmix) {
		Skins.QueueMessage(mtInfo, tr("surround downmix enabled"));
	    } else {
		Skins.QueueMessage(mtInfo, tr("surround downmix disabled"));
	    }
	    ResetChannelId();
	    break;

	case 20:			// disable full screen
	    VideoSetFullscreen(0);
	    break;
	case 21:			// enable full screen
	    VideoSetFullscreen(1);
	    break;
	case 22:			// toggle full screen
	    VideoSetFullscreen(-1);
	    break;
	case 23:			// disable auto-crop
	    ConfigAutoCropEnabled = 0;
	    VideoSetAutoCrop(0, ConfigAutoCropDelay, ConfigAutoCropTolerance);
	    Skins.QueueMessage(mtInfo, tr("auto-crop disabled and freezed"));
	    break;
	case 24:			// enable auto-crop
	    ConfigAutoCropEnabled = 1;
	    if (!ConfigAutoCropInterval) {	// no interval configured
		ConfigAutoCropInterval = 50;
	    }
	    VideoSetAutoCrop(ConfigAutoCropInterval, ConfigAutoCropDelay,
		ConfigAutoCropTolerance);
	    Skins.QueueMessage(mtInfo, tr("auto-crop enabled"));
	    break;
	case 25:			// toggle auto-crop
	    ConfigAutoCropEnabled ^= 1;
	    // no interval configured, use some default
	    if (!ConfigAutoCropInterval) {
		ConfigAutoCropInterval = 50;
	    }
	    VideoSetAutoCrop(ConfigAutoCropEnabled * ConfigAutoCropInterval,
		ConfigAutoCropDelay, ConfigAutoCropTolerance);
	    if (ConfigAutoCropEnabled) {
		Skins.QueueMessage(mtInfo, tr("auto-crop enabled"));
	    } else {
		Skins.QueueMessage(mtInfo,
		    tr("auto-crop disabled and freezed"));
	    }
	    break;

	case 30:			// change 4:3 -> window mode
	case 31:
	case 32:
	    VideoSet4to3DisplayFormat(code - 30);
	    break;
	case 39:			// rotate 4:3 -> window mode
	    VideoSet4to3DisplayFormat(-1);
	    break;
	case 40:			// change 16:9 -> window mode
	case 41:
	case 42:
	    VideoSetOtherDisplayFormat(code - 40);
	    break;
	case 49:			// rotate 16:9 -> window mode
	    VideoSetOtherDisplayFormat(-1);
	    break;

#ifdef USE_PIP
	case 102:			// PIP toggle
	    TogglePip();
	    break;
	case 104:			// PIP channel +
	    PipNextAvailableChannel(1);
	    break;
	case 105:			// PIP channel -
	    PipNextAvailableChannel(-1);
	    break;
	case 106:			// PIP swap channels
	    SwapPipChannels();
	    break;
	case 107:			// PIP swap position
	    SwapPipPosition();
	    break;
	case 108:			// PIP close
	    DelPip();
	    PipChannelNr = 0;
	    break;
#endif

	default:
	    esyslog(tr("[softhddev]: hot key %d is not supported\n"), code);
	    break;
    }
}

/**
**	Handle key event.
** ** @param key key event */ eOSState cSoftHdMenu::ProcessKey(eKeys key) { eOSState state; //dsyslog("[softhddev]%s: %x\n", __FUNCTION__, key); switch (HotkeyState) { case HksInitial: // initial state, waiting for hot key if (key == kBlue) { HotkeyState = HksBlue; // blue button return osContinue; } if (key == kRed) { HotkeyState = HksRed; // red button return osContinue; } break; case HksBlue: // blue and first number if (k0 <= key && key <= k9) { HotkeyCode = key - k0; HotkeyState = HksBlue1; return osContinue; } HotkeyState = HksInitial; break; case HksBlue1: // blue and second number/enter if (k0 <= key && key <= k9) { HotkeyCode *= 10; HotkeyCode += key - k0; HotkeyState = HksInitial; dsyslog("[softhddev]%s: hot-key %d\n", __FUNCTION__, HotkeyCode); HandleHotkey(HotkeyCode); return osEnd; } if (key == kOk) { HotkeyState = HksInitial; dsyslog("[softhddev]%s: hot-key %d\n", __FUNCTION__, HotkeyCode); HandleHotkey(HotkeyCode); return osEnd; } HotkeyState = HksInitial; case HksRed: // red and first number if (k0 <= key && key <= k9) { HotkeyCode = 100 + key - k0; HotkeyState = HksInitial; HandleHotkey(HotkeyCode); return osEnd; } HotkeyState = HksInitial; break; } // call standard function state = cOsdMenu::ProcessKey(key); switch (state) { case osUser1: // not already suspended if (SuspendMode == NOT_SUSPENDED && !cSoftHdControl::Player) { cControl::Launch(new cSoftHdControl); cControl::Attach(); if (ConfigDetachFromMainMenu) { Suspend(1, 1, 0); SuspendMode = SUSPEND_DETACHED; } else { Suspend(ConfigSuspendClose, ConfigSuspendClose, ConfigSuspendX11); SuspendMode = SUSPEND_NORMAL; } if (ShutdownHandler.GetUserInactiveTime()) { dsyslog("[softhddev]%s: set user inactive\n", __FUNCTION__); ShutdownHandler.SetUserInactive(); } } return osEnd; #ifdef USE_PIP case osUser2: TogglePip(); return osEnd; case osUser4: PipNextAvailableChannel(1); return osEnd; case osUser5: PipNextAvailableChannel(-1); return osEnd; case osUser6: SwapPipChannels(); return osEnd; case 
osUser7: SwapPipPosition(); return osEnd; case osUser8: DelPip(); PipChannelNr = 0; return osEnd; #endif default: Create(); break; } return state; } ////////////////////////////////////////////////////////////////////////////// // cDevice ////////////////////////////////////////////////////////////////////////////// class cSoftHdDevice:public cDevice { public: cSoftHdDevice(void); virtual ~ cSoftHdDevice(void); virtual bool HasDecoder(void) const; virtual bool CanReplay(void) const; virtual bool SetPlayMode(ePlayMode); #if APIVERSNUM >= 20103 virtual void TrickSpeed(int, bool); #else virtual void TrickSpeed(int); #endif virtual void Clear(void); virtual void Play(void); virtual void Freeze(void); virtual void Mute(void); virtual void StillPicture(const uchar *, int); virtual bool Poll(cPoller &, int = 0); virtual bool Flush(int = 0); virtual int64_t GetSTC(void); #if APIVERSNUM >= 10733 virtual cRect CanScaleVideo(const cRect &, int = taCenter); virtual void ScaleVideo(const cRect & = cRect::Null); #endif virtual void SetVideoDisplayFormat(eVideoDisplayFormat); virtual void SetVideoFormat(bool); virtual void GetVideoSize(int &, int &, double &); virtual void GetOsdSize(int &, int &, double &); virtual int PlayVideo(const uchar *, int); virtual int PlayAudio(const uchar *, int, uchar); #ifdef USE_TS_VIDEO virtual int PlayTsVideo(const uchar *, int); #endif #if !defined(USE_AUDIO_THREAD) || !defined(NO_TS_AUDIO) virtual int PlayTsAudio(const uchar *, int); #endif virtual void SetAudioChannelDevice(int); virtual int GetAudioChannelDevice(void); virtual void SetDigitalAudioDevice(bool); virtual void SetAudioTrackDevice(eTrackType); virtual void SetVolumeDevice(int); // Image Grab facilities virtual uchar *GrabImage(int &, bool, int, int, int); #ifdef USE_VDR_SPU // SPU facilities private: cDvbSpuDecoder * spuDecoder; public: virtual cSpuDecoder * GetSpuDecoder(void); #endif protected: virtual void MakePrimaryDevice(bool); }; /** ** Constructor device. 
*/
cSoftHdDevice::cSoftHdDevice(void)
{
    //dsyslog("[softhddev]%s\n", __FUNCTION__);

#ifdef USE_VDR_SPU
    spuDecoder = NULL;
#endif
}

/**
**	Destructor device.
*/
cSoftHdDevice::~cSoftHdDevice(void)
{
    //dsyslog("[softhddev]%s:\n", __FUNCTION__);
#ifdef USE_VDR_SPU
    delete spuDecoder;
#endif
}

/**
**	Informs a device that it will be the primary device.
**
**	@param on	flag if becoming or loosing primary
*/
void cSoftHdDevice::MakePrimaryDevice(bool on)
{
    dsyslog("[softhddev]%s: %d\n", __FUNCTION__, on);

    cDevice::MakePrimaryDevice(on);
    if (on) {
	new cSoftOsdProvider();

	if (SuspendMode == SUSPEND_DETACHED) {
	    Resume();
	    SuspendMode = NOT_SUSPENDED;
	}
    } else if (SuspendMode == NOT_SUSPENDED) {
	Suspend(1, 1, 0);
	SuspendMode = SUSPEND_DETACHED;
    }
}

#ifdef USE_VDR_SPU

/**
**	Get the device SPU decoder.
**
**	@returns a pointer to the device's SPU decoder (or NULL, if this
**	device doesn't have an SPU decoder)
*/
cSpuDecoder *cSoftHdDevice::GetSpuDecoder(void)
{
    dsyslog("[softhddev]%s:\n", __FUNCTION__);

    if (!spuDecoder && IsPrimaryDevice()) {
	spuDecoder = new cDvbSpuDecoder();
    }
    return spuDecoder;
}

#endif

/**
**	Tells whether this device has a MPEG decoder.
*/
bool cSoftHdDevice::HasDecoder(void) const
{
    return true;
}

/**
**	Returns true if this device can currently start a replay session.
*/
bool cSoftHdDevice::CanReplay(void) const
{
    return true;
}

/**
**	Sets the device into the given play mode.
**
**	@param play_mode	new play mode (Audio/Video/External...)
*/
bool cSoftHdDevice::SetPlayMode(ePlayMode play_mode)
{
    dsyslog("[softhddev]%s: %d\n", __FUNCTION__, play_mode);

    switch (play_mode) {
	case pmAudioVideo:
	    break;
	case pmAudioOnly:
	case pmAudioOnlyBlack:
	    break;
	case pmVideoOnly:
	    break;
	case pmNone:
	    break;
	case pmExtern_THIS_SHOULD_BE_AVOIDED:
	    dsyslog("[softhddev] play mode external\n");
	    // FIXME: what if already suspended?
	    Suspend(1, 1, 0);
	    SuspendMode = SUSPEND_EXTERNAL;
	    return true;
	default:
	    dsyslog("[softhddev] playmode not implemented... %d\n",
		play_mode);
	    break;
    }

    // leaving external suspend resumes the device; any other suspend
    // state refuses the play mode change
    if (SuspendMode != NOT_SUSPENDED) {
	if (SuspendMode != SUSPEND_EXTERNAL) {
	    return false;
	}
	Resume();
	SuspendMode = NOT_SUSPENDED;
    }

    return::SetPlayMode(play_mode);
}

/**
**	Gets the current System Time Counter, which can be used to
**	synchronize audio, video and subtitles.
*/
int64_t cSoftHdDevice::GetSTC(void)
{
    //dsyslog("[softhddev]%s:\n", __FUNCTION__);

    return::GetSTC();
}

/**
**	Set trick play speed.
**
**	Every single frame shall then be displayed the given number of
**	times.
**
**	@param speed	trick speed
**	@param forward	flag forward direction
*/
#if APIVERSNUM >= 20103
void cSoftHdDevice::TrickSpeed(int speed, bool forward)
{
    dsyslog("[softhddev]%s: %d %d\n", __FUNCTION__, speed, forward);

    ::TrickSpeed(speed);
}
#else
void cSoftHdDevice::TrickSpeed(int speed)
{
    dsyslog("[softhddev]%s: %d\n", __FUNCTION__, speed);

    ::TrickSpeed(speed);
}
#endif

/**
**	Clears all video and audio data from the device.
*/
void cSoftHdDevice::Clear(void)
{
    dsyslog("[softhddev]%s:\n", __FUNCTION__);

    cDevice::Clear();
    ::Clear();
}

/**
**	Sets the device into play mode (after a previous trick mode)
*/
void cSoftHdDevice::Play(void)
{
    dsyslog("[softhddev]%s:\n", __FUNCTION__);

    cDevice::Play();
    ::Play();
}

/**
**	Puts the device into "freeze frame" mode.
*/
void cSoftHdDevice::Freeze(void)
{
    dsyslog("[softhddev]%s:\n", __FUNCTION__);

    cDevice::Freeze();
    ::Freeze();
}

/**
**	Turns off audio while replaying.
*/
void cSoftHdDevice::Mute(void)
{
    dsyslog("[softhddev]%s:\n", __FUNCTION__);

    cDevice::Mute();
    ::Mute();
}

/**
**	Display the given I-frame as a still picture.
**
**	@param data	pes or ts data of a frame
**	@param length	length of data area
*/
void cSoftHdDevice::StillPicture(const uchar * data, int length)
{
    dsyslog("[softhddev]%s: %s %p %d\n", __FUNCTION__,
	data[0] == 0x47 ? "ts" : "pes", data, length);

    if (data[0] == 0x47) {		// ts sync: let the base class demux
	cDevice::StillPicture(data, length);
	return;
    }

    ::StillPicture(data, length);
}

/**
**	Check if the device is ready for further action.
**
**	@param poller		file handles (unused)
**	@param timeout_ms	timeout in ms to become ready
**
**	@retval true	if ready
**	@retval false	if busy
*/
bool cSoftHdDevice::Poll(
    __attribute__ ((unused)) cPoller & poller, int timeout_ms)
{
    //dsyslog("[softhddev]%s: %d\n", __FUNCTION__, timeout_ms);

    return::Poll(timeout_ms);
}

/**
**	Flush the device output buffers.
**
**	@param timeout_ms	timeout in ms to become ready
*/
bool cSoftHdDevice::Flush(int timeout_ms)
{
    dsyslog("[softhddev]%s: %d ms\n", __FUNCTION__, timeout_ms);

    return::Flush(timeout_ms);
}

// ----------------------------------------------------------------------------

/**
**	Sets the video display format to the given one (only useful if this
**	device has an MPEG decoder).
*/
void cSoftHdDevice::
SetVideoDisplayFormat(eVideoDisplayFormat video_display_format)
{
    dsyslog("[softhddev]%s: %d\n", __FUNCTION__, video_display_format);

    cDevice::SetVideoDisplayFormat(video_display_format);
#if 0
    // disabled: remember last value to avoid redundant OSD invalidation
    static int last = -1;

    // called on every channel switch, no need to kill osd...
    if (last != video_display_format) {
	last = video_display_format;

	::VideoSetDisplayFormat(video_display_format);
	cSoftOsd::Dirty = 1;
    }
#endif
}

/**
**	Sets the output video format to either 16:9 or 4:3 (only useful
**	if this device has an MPEG decoder).
**
**	Should call SetVideoDisplayFormat.
**
**	@param video_format16_9	flag true 16:9.
*/
void cSoftHdDevice::SetVideoFormat(bool video_format16_9)
{
    dsyslog("[softhddev]%s: %d\n", __FUNCTION__, video_format16_9);

    // FIXME: 4:3 / 16:9 video format not supported.

    SetVideoDisplayFormat(eVideoDisplayFormat(Setup.VideoDisplayFormat));
}

/**
**	Returns the width, height and video_aspect ratio of the currently
**	displayed video material.
**
**	@note the video_aspect is used to scale the subtitle.
*/ void cSoftHdDevice::GetVideoSize(int &width, int &height, double &video_aspect) { ::GetVideoSize(&width, &height, &video_aspect); } /** ** Returns the width, height and pixel_aspect ratio the OSD. ** ** FIXME: Called every second, for nothing (no OSD displayed)? */ void cSoftHdDevice::GetOsdSize(int &width, int &height, double &pixel_aspect) { ::GetOsdSize(&width, &height, &pixel_aspect); } // ---------------------------------------------------------------------------- /** ** Play a audio packet. ** ** @param data exactly one complete PES packet (which is incomplete) ** @param length length of PES packet ** @param id type of audio data this packet holds */ int cSoftHdDevice::PlayAudio(const uchar * data, int length, uchar id) { //dsyslog("[softhddev]%s: %p %p %d %d\n", __FUNCTION__, this, data, length, id); return::PlayAudio(data, length, id); } void cSoftHdDevice::SetAudioTrackDevice( __attribute__ ((unused)) eTrackType type) { //dsyslog("[softhddev]%s:\n", __FUNCTION__); } void cSoftHdDevice::SetDigitalAudioDevice( __attribute__ ((unused)) bool on) { //dsyslog("[softhddev]%s: %s\n", __FUNCTION__, on ? "true" : "false"); } void cSoftHdDevice::SetAudioChannelDevice( __attribute__ ((unused)) int audio_channel) { //dsyslog("[softhddev]%s: %d\n", __FUNCTION__, audio_channel); } int cSoftHdDevice::GetAudioChannelDevice(void) { //dsyslog("[softhddev]%s:\n", __FUNCTION__); return 0; } /** ** Sets the audio volume on this device (Volume = 0...255). ** ** @param volume device volume */ void cSoftHdDevice::SetVolumeDevice(int volume) { dsyslog("[softhddev]%s: %d\n", __FUNCTION__, volume); ::SetVolumeDevice(volume); } // ---------------------------------------------------------------------------- /** ** Play a video packet. 
** ** @param data exactly one complete PES packet (which is incomplete) ** @param length length of PES packet */ int cSoftHdDevice::PlayVideo(const uchar * data, int length) { //dsyslog("[softhddev]%s: %p %d\n", __FUNCTION__, data, length); return::PlayVideo(data, length); } #ifdef USE_TS_VIDEO /** ** Play a TS video packet. ** ** @param data ts data buffer ** @param length ts packet length (188) */ int cSoftHdDevice::PlayTsVideo(const uchar * data, int length) { } #endif #if !defined(USE_AUDIO_THREAD) || !defined(NO_TS_AUDIO) /** ** Play a TS audio packet. ** ** @param data ts data buffer ** @param length ts packet length (188) */ int cSoftHdDevice::PlayTsAudio(const uchar * data, int length) { #ifndef NO_TS_AUDIO return::PlayTsAudio(data, length); #else AudioPoller(); return cDevice::PlayTsAudio(data, length); #endif } #endif /** ** Grabs the currently visible screen image. ** ** @param size size of the returned data ** @param jpeg flag true, create JPEG data ** @param quality JPEG quality ** @param width number of horizontal pixels in the frame ** @param height number of vertical pixels in the frame */ uchar *cSoftHdDevice::GrabImage(int &size, bool jpeg, int quality, int width, int height) { dsyslog("[softhddev]%s: %d, %d, %d, %dx%d\n", __FUNCTION__, size, jpeg, quality, width, height); if (SuspendMode != NOT_SUSPENDED) { return NULL; } if (quality < 0) { // caller should care, but fix it quality = 95; } return::GrabImage(&size, jpeg, quality, width, height); } #if APIVERSNUM >= 10733 /** ** Ask the output, if it can scale video. ** ** @param rect requested video window rectangle ** ** @returns the real rectangle or cRect:Null if invalid. */ cRect cSoftHdDevice::CanScaleVideo(const cRect & rect, __attribute__ ((unused)) int alignment) { return rect; } /** ** Scale the currently shown video. 
** ** @param rect video window rectangle */ void cSoftHdDevice::ScaleVideo(const cRect & rect) { #ifdef OSD_DEBUG dsyslog("[softhddev]%s: %dx%d%+d%+d\n", __FUNCTION__, rect.Width(), rect.Height(), rect.X(), rect.Y()); #endif ::ScaleVideo(rect.X(), rect.Y(), rect.Width(), rect.Height()); } #endif /** ** Call rgb to jpeg for C Plugin. */ extern "C" uint8_t * CreateJpeg(uint8_t * image, int *size, int quality, int width, int height) { return (uint8_t *) RgbToJpeg((uchar *) image, width, height, *size, quality); } ////////////////////////////////////////////////////////////////////////////// // cPlugin ////////////////////////////////////////////////////////////////////////////// class cPluginSoftHdDevice:public cPlugin { public: cPluginSoftHdDevice(void); virtual ~ cPluginSoftHdDevice(void); virtual const char *Version(void); virtual const char *Description(void); virtual const char *CommandLineHelp(void); virtual bool ProcessArgs(int, char *[]); virtual bool Initialize(void); virtual bool Start(void); virtual void Stop(void); virtual void Housekeeping(void); virtual void MainThreadHook(void); virtual const char *MainMenuEntry(void); virtual cOsdObject *MainMenuAction(void); virtual cMenuSetupPage *SetupMenu(void); virtual bool SetupParse(const char *, const char *); virtual bool Service(const char *, void * = NULL); virtual const char **SVDRPHelpPages(void); virtual cString SVDRPCommand(const char *, const char *, int &); }; /** ** Initialize any member variables here. ** ** @note DON'T DO ANYTHING ELSE THAT MAY HAVE SIDE EFFECTS, REQUIRE GLOBAL ** VDR OBJECTS TO EXIST OR PRODUCE ANY OUTPUT! */ cPluginSoftHdDevice::cPluginSoftHdDevice(void) { //dsyslog("[softhddev]%s:\n", __FUNCTION__); } /** ** Clean up after yourself! */ cPluginSoftHdDevice::~cPluginSoftHdDevice(void) { //dsyslog("[softhddev]%s:\n", __FUNCTION__); ::SoftHdDeviceExit(); // keep ConfigX11Display ... } /** ** Return plugin version number. ** ** @returns version number as constant string. 
*/ const char *cPluginSoftHdDevice::Version(void) { return VERSION; } /** ** Return plugin short description. ** ** @returns short description as constant string. */ const char *cPluginSoftHdDevice::Description(void) { return tr(DESCRIPTION); } /** ** Return a string that describes all known command line options. ** ** @returns command line help as constant string. */ const char *cPluginSoftHdDevice::CommandLineHelp(void) { return::CommandLineHelp(); } /** ** Process the command line arguments. */ bool cPluginSoftHdDevice::ProcessArgs(int argc, char *argv[]) { //dsyslog("[softhddev]%s:\n", __FUNCTION__); return::ProcessArgs(argc, argv); } /** ** Initializes the DVB devices. ** ** Must be called before accessing any DVB functions. ** ** @returns true if any devices are available. */ bool cPluginSoftHdDevice::Initialize(void) { //dsyslog("[softhddev]%s:\n", __FUNCTION__); MyDevice = new cSoftHdDevice(); return true; } /** ** Start any background activities the plugin shall perform. */ bool cPluginSoftHdDevice::Start(void) { //dsyslog("[softhddev]%s:\n", __FUNCTION__); if (!MyDevice->IsPrimaryDevice()) { isyslog("[softhddev] softhddevice %d is not the primary device!", MyDevice->DeviceNumber()); if (ConfigMakePrimary) { // Must be done in the main thread dsyslog("[softhddev] makeing softhddevice %d the primary device!", MyDevice->DeviceNumber()); DoMakePrimary = MyDevice->DeviceNumber() + 1; } } switch (::Start()) { case 1: //cControl::Launch(new cSoftHdControl); //cControl::Attach(); // FIXME: VDR overwrites the control SuspendMode = SUSPEND_NORMAL; break; case -1: SuspendMode = SUSPEND_DETACHED; break; case 0: default: break; } return true; } /** ** Shutdown plugin. Stop any background activities the plugin is ** performing. */ void cPluginSoftHdDevice::Stop(void) { //dsyslog("[softhddev]%s:\n", __FUNCTION__); ::Stop(); } /** ** Perform any cleanup or other regular tasks. 
*/
void cPluginSoftHdDevice::Housekeeping(void)
{
    //dsyslog("[softhddev]%s:\n", __FUNCTION__);

    // check if user is inactive, automatic enter suspend mode
    // FIXME: cControl prevents shutdown, disable this until fixed
    if (0 && SuspendMode == NOT_SUSPENDED && ShutdownHandler.IsUserInactive()) {
	// don't overwrite already suspended suspend mode
	cControl::Launch(new cSoftHdControl);
	cControl::Attach();
	Suspend(ConfigSuspendClose, ConfigSuspendClose, ConfigSuspendX11);
	SuspendMode = SUSPEND_NORMAL;
    }

    ::Housekeeping();
}

/**
**	Create main menu entry.
*/
const char *cPluginSoftHdDevice::MainMenuEntry(void)
{
    //dsyslog("[softhddev]%s:\n", __FUNCTION__);

    // NULL hides the entry from the main menu
    return ConfigHideMainMenuEntry ? NULL : tr(MAINMENUENTRY);
}

/**
**	Perform the action when selected from the main VDR menu.
*/
cOsdObject *cPluginSoftHdDevice::MainMenuAction(void)
{
    //dsyslog("[softhddev]%s:\n", __FUNCTION__);

    return new cSoftHdMenu("SoftHdDevice");
}

/**
**	Called for every plugin once during every cycle of VDR's main program
**	loop.
*/
void cPluginSoftHdDevice::MainThreadHook(void)
{
    //dsyslog("[softhddev]%s:\n", __FUNCTION__);

    // deferred primary device switch; must run in VDR's main thread
    if (DoMakePrimary) {
	dsyslog("[softhddev]%s: switching primary device to %d\n",
	    __FUNCTION__, DoMakePrimary);
	cDevice::SetPrimaryDevice(DoMakePrimary);
	DoMakePrimary = 0;
    }

    ::MainThreadHook();
}

/**
**	Return our setup menu.
*/
cMenuSetupPage *cPluginSoftHdDevice::SetupMenu(void)
{
    //dsyslog("[softhddev]%s:\n", __FUNCTION__);

    return new cMenuSetupSoft;
}

/**
**	Parse setup parameters
**
**	@param name	parameter name (case sensitive)
**	@param value	value as string
**
**	@returns true if the parameter is supported.
*/
bool cPluginSoftHdDevice::SetupParse(const char *name, const char *value)
{
    int i;

    //dsyslog("[softhddev]%s: '%s' = '%s'\n", __FUNCTION__, name, value);

    // global plugin settings
    if (!strcasecmp(name, "MakePrimary")) {
	ConfigMakePrimary = atoi(value);
	return true;
    }
    if (!strcasecmp(name, "HideMainMenuEntry")) {
	ConfigHideMainMenuEntry = atoi(value);
	return true;
    }
    if (!strcasecmp(name, "DetachFromMainMenu")) {
	ConfigDetachFromMainMenu = atoi(value);
	return true;
    }
    // OSD dimensions; forwarded to the video module immediately
    if (!strcasecmp(name, "Osd.Width")) {
	ConfigOsdWidth = atoi(value);
	VideoSetOsdSize(ConfigOsdWidth, ConfigOsdHeight);
	return true;
    }
    if (!strcasecmp(name, "Osd.Height")) {
	ConfigOsdHeight = atoi(value);
	VideoSetOsdSize(ConfigOsdWidth, ConfigOsdHeight);
	return true;
    }
    if (!strcasecmp(name, "Suspend.Close")) {
	ConfigSuspendClose = atoi(value);
	return true;
    }
    if (!strcasecmp(name, "Suspend.X11")) {
	ConfigSuspendX11 = atoi(value);
	return true;
    }
    // aspect ratio / display format handling
    if (!strcasecmp(name, "Video4to3DisplayFormat")) {
	Config4to3DisplayFormat = atoi(value);
	VideoSet4to3DisplayFormat(Config4to3DisplayFormat);
	return true;
    }
    if (!strcasecmp(name, "VideoOtherDisplayFormat")) {
	ConfigOtherDisplayFormat = atoi(value);
	VideoSetOtherDisplayFormat(ConfigOtherDisplayFormat);
	return true;
    }
    if (!strcasecmp(name, "Background")) {
	// color value, any base accepted by strtoul (hex/octal/decimal)
	VideoSetBackground(ConfigVideoBackground = strtoul(value, NULL, 0));
	return true;
    }
    if (!strcasecmp(name, "StudioLevels")) {
	VideoSetStudioLevels(ConfigVideoStudioLevels = atoi(value));
	return true;
    }
    if (!strcasecmp(name, "60HzMode")) {
	VideoSet60HzMode(ConfigVideo60HzMode = atoi(value));
	return true;
    }
    if (!strcasecmp(name, "SoftStartSync")) {
	VideoSetSoftStartSync(ConfigVideoSoftStartSync = atoi(value));
	return true;
    }
    if (!strcasecmp(name, "BlackPicture")) {
	VideoSetBlackPicture(ConfigVideoBlackPicture = atoi(value));
	return true;
    }
    if (!strcasecmp(name, "ClearOnSwitch")) {
	ConfigVideoClearOnSwitch = atoi(value);
	return true;
    }
    // picture properties
    if (!strcasecmp(name, "Brightness")) {
	VideoSetBrightness(ConfigVideoBrightness = atoi(value));
	return true;
    }
    if (!strcasecmp(name, "Contrast")) {
	VideoSetContrast(ConfigVideoContrast = atoi(value));
	return true;
    }
    if (!strcasecmp(name, "Saturation")) {
	VideoSetSaturation(ConfigVideoSaturation = atoi(value));
	return true;
    }
    if (!strcasecmp(name, "Hue")) {
	VideoSetHue(ConfigVideoHue = atoi(value));
	return true;
    }
    // per-resolution settings; keys look like "<resolution>.<setting>"
    for (i = 0; i < RESOLUTIONS; ++i) {
	char buf[128];

	snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "Scaling");
	if (!strcasecmp(name, buf)) {
	    ConfigVideoScaling[i] = atoi(value);
	    VideoSetScaling(ConfigVideoScaling);
	    return true;
	}
	snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "Deinterlace");
	if (!strcasecmp(name, buf)) {
	    ConfigVideoDeinterlace[i] = atoi(value);
	    VideoSetDeinterlace(ConfigVideoDeinterlace);
	    return true;
	}
	snprintf(buf, sizeof(buf), "%s.%s", Resolution[i],
	    "SkipChromaDeinterlace");
	if (!strcasecmp(name, buf)) {
	    ConfigVideoSkipChromaDeinterlace[i] = atoi(value);
	    VideoSetSkipChromaDeinterlace(ConfigVideoSkipChromaDeinterlace);
	    return true;
	}
	snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "InverseTelecine");
	if (!strcasecmp(name, buf)) {
	    ConfigVideoInverseTelecine[i] = atoi(value);
	    VideoSetInverseTelecine(ConfigVideoInverseTelecine);
	    return true;
	}
	snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "Denoise");
	if (!strcasecmp(name, buf)) {
	    ConfigVideoDenoise[i] = atoi(value);
	    VideoSetDenoise(ConfigVideoDenoise);
	    return true;
	}
	snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "Sharpen");
	if (!strcasecmp(name, buf)) {
	    ConfigVideoSharpen[i] = atoi(value);
	    VideoSetSharpen(ConfigVideoSharpen);
	    return true;
	}
	snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "CutTopBottom");
	if (!strcasecmp(name, buf)) {
	    ConfigVideoCutTopBottom[i] = atoi(value);
	    VideoSetCutTopBottom(ConfigVideoCutTopBottom);
	    return true;
	}
	snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "CutLeftRight");
	if (!strcasecmp(name, buf)) {
	    ConfigVideoCutLeftRight[i] = atoi(value);
	    VideoSetCutLeftRight(ConfigVideoCutLeftRight);
	    return true;
	}
    }
    // auto-crop settings
    if (!strcasecmp(name, "AutoCrop.Interval")) {
	VideoSetAutoCrop(ConfigAutoCropInterval = atoi(value),
	    ConfigAutoCropDelay, ConfigAutoCropTolerance);
	ConfigAutoCropEnabled = ConfigAutoCropInterval != 0;
	return true;
    }
    if (!strcasecmp(name, "AutoCrop.Delay")) {
	VideoSetAutoCrop(ConfigAutoCropInterval, ConfigAutoCropDelay =
	    atoi(value), ConfigAutoCropTolerance);
	return true;
    }
    if (!strcasecmp(name, "AutoCrop.Tolerance")) {
	VideoSetAutoCrop(ConfigAutoCropInterval, ConfigAutoCropDelay,
	    ConfigAutoCropTolerance = atoi(value));
	return true;
    }
    // audio settings
    if (!strcasecmp(name, "AudioDelay")) {
	VideoSetAudioDelay(ConfigVideoAudioDelay = atoi(value));
	return true;
    }
    if (!strcasecmp(name, "AudioDrift")) {
	CodecSetAudioDrift(ConfigAudioDrift = atoi(value));
	return true;
    }
    if (!strcasecmp(name, "AudioPassthrough")) {
	// NOTE: this 'i' shadows the outer loop variable; the sign
	// selects enabled/disabled, the magnitude the pass-through mask
	int i;

	i = atoi(value);
	AudioPassthroughState = i > 0;
	ConfigAudioPassthrough = abs(i);
	if (AudioPassthroughState) {
	    CodecSetAudioPassthrough(ConfigAudioPassthrough);
	} else {
	    CodecSetAudioPassthrough(0);
	}
	return true;
    }
    if (!strcasecmp(name, "AudioDownmix")) {
	CodecSetAudioDownmix(ConfigAudioDownmix = atoi(value));
	return true;
    }
    if (!strcasecmp(name, "AudioSoftvol")) {
	AudioSetSoftvol(ConfigAudioSoftvol = atoi(value));
	return true;
    }
    if (!strcasecmp(name, "AudioNormalize")) {
	ConfigAudioNormalize = atoi(value);
	AudioSetNormalize(ConfigAudioNormalize, ConfigAudioMaxNormalize);
	return true;
    }
    if (!strcasecmp(name, "AudioMaxNormalize")) {
	ConfigAudioMaxNormalize = atoi(value);
	AudioSetNormalize(ConfigAudioNormalize, ConfigAudioMaxNormalize);
	return true;
    }
    if (!strcasecmp(name, "AudioCompression")) {
	ConfigAudioCompression = atoi(value);
	AudioSetCompression(ConfigAudioCompression, ConfigAudioMaxCompression);
	return true;
    }
    if (!strcasecmp(name, "AudioMaxCompression")) {
	ConfigAudioMaxCompression = atoi(value);
	AudioSetCompression(ConfigAudioCompression, ConfigAudioMaxCompression);
	return true;
    }
    if (!strcasecmp(name, "AudioStereoDescent")) {
	ConfigAudioStereoDescent = atoi(value);
	AudioSetStereoDescent(ConfigAudioStereoDescent);
	return true;
    }
    if (!strcasecmp(name, "AudioBufferTime")) {
	// picked up on the next audio setup; no immediate setter call
	ConfigAudioBufferTime = atoi(value);
	return true;
    }
    if (!strcasecmp(name, "AudioAutoAES")) {
	ConfigAudioAutoAES = atoi(value);
	AudioSetAutoAES(ConfigAudioAutoAES);
	return true;
    }
#ifdef USE_PIP
    // picture-in-picture window geometry (normal and alternative layout)
    if (!strcasecmp(name, "pip.X")) {
	ConfigPipX = atoi(value);
	return true;
    }
    if (!strcasecmp(name, "pip.Y")) {
	ConfigPipY = atoi(value);
	return true;
    }
    if (!strcasecmp(name, "pip.Width")) {
	ConfigPipWidth = atoi(value);
	return true;
    }
    if (!strcasecmp(name, "pip.Height")) {
	ConfigPipHeight = atoi(value);
	return true;
    }
    if (!strcasecmp(name, "pip.VideoX")) {
	ConfigPipVideoX = atoi(value);
	return true;
    }
    if (!strcasecmp(name, "pip.VideoY")) {
	ConfigPipVideoY = atoi(value);
	return true;
    }
    if (!strcasecmp(name, "pip.VideoWidth")) {
	ConfigPipVideoWidth = atoi(value);
	return true;
    }
    if (!strcasecmp(name, "pip.VideoHeight")) {
	ConfigPipVideoHeight = atoi(value);
	return true;
    }
    if (!strcasecmp(name, "pip.Alt.X")) {
	ConfigPipAltX = atoi(value);
	return true;
    }
    if (!strcasecmp(name, "pip.Alt.Y")) {
	ConfigPipAltY = atoi(value);
	return true;
    }
    if (!strcasecmp(name, "pip.Alt.Width")) {
	ConfigPipAltWidth = atoi(value);
	return true;
    }
    if (!strcasecmp(name, "pip.Alt.Height")) {
	ConfigPipAltHeight = atoi(value);
	return true;
    }
    if (!strcasecmp(name, "pip.Alt.VideoX")) {
	ConfigPipAltVideoX = atoi(value);
	return true;
    }
    if (!strcasecmp(name, "pip.Alt.VideoY")) {
	ConfigPipAltVideoY = atoi(value);
	return true;
    }
    if (!strcasecmp(name, "pip.Alt.VideoWidth")) {
	ConfigPipAltVideoWidth = atoi(value);
	return true;
    }
    if (!strcasecmp(name, "pip.Alt.VideoHeight")) {
	ConfigPipAltVideoHeight = atoi(value);
	return true;
    }
#endif

    return false;
}

/**
**	Receive requests or messages.
** ** @param id unique identification string that identifies the ** service protocol ** @param data custom data structure */ bool cPluginSoftHdDevice::Service(const char *id, void *data) { //dsyslog("[softhddev]%s: id %s\n", __FUNCTION__, id); if (strcmp(id, OSD_3DMODE_SERVICE) == 0) { SoftHDDevice_Osd3DModeService_v1_0_t *r; r = (SoftHDDevice_Osd3DModeService_v1_0_t *) data; VideoSetOsd3DMode(r->Mode); return true; } if (strcmp(id, ATMO_GRAB_SERVICE) == 0) { int width; int height; if (data == NULL) { return true; } if (SuspendMode != NOT_SUSPENDED) { return false; } SoftHDDevice_AtmoGrabService_v1_0_t *r = (SoftHDDevice_AtmoGrabService_v1_0_t *) data; if (r->structSize != sizeof(SoftHDDevice_AtmoGrabService_v1_0_t) || r->analyseSize < 64 || r->analyseSize > 256 || r->clippedOverscan < 0 || r->clippedOverscan > 200) { return false; } width = r->analyseSize * -1; // Internal marker for Atmo grab service height = r->clippedOverscan; r->img = VideoGrabService(&r->imgSize, &width, &height); if (r->img == NULL) { return false; } r->imgType = GRAB_IMG_RGBA_FORMAT_B8G8R8A8; r->width = width; r->height = height; return true; } if (strcmp(id, ATMO1_GRAB_SERVICE) == 0) { SoftHDDevice_AtmoGrabService_v1_1_t *r; if (!data) { return true; } if (SuspendMode != NOT_SUSPENDED) { return false; } r = (SoftHDDevice_AtmoGrabService_v1_1_t *) data; r->img = VideoGrabService(&r->size, &r->width, &r->height); if (!r->img) { return false; } return true; } return false; } //---------------------------------------------------------------------------- // cPlugin SVDRP //---------------------------------------------------------------------------- /** ** SVDRP commands help text. ** FIXME: translation? */ static const char *SVDRPHelpText[] = { "SUSP\n" "\040 Suspend plugin.\n\n" " The plugin is suspended to save energie. 
Depending on the setup\n" " 'softhddevice.Suspend.Close = 0' only the video and audio output\n" " is stopped or with 'softhddevice.Suspend.Close = 1' the video\n" " and audio devices are closed.\n" " If 'softhddevice.Suspend.X11 = 1' is set and the X11 server was\n" " started by the plugin, the X11 server would also be closed.\n" " (Stopping X11 while suspended isn't supported yet)\n", "RESU\n" "\040 Resume plugin.\n\n" " Resume the suspended plugin. The plugin could be suspended by\n" " the command line option '-s' or by a previous SUSP command.\n" " If the x11 server was stopped by the plugin, it will be\n" " restarted.", "DETA\n" "\040 Detach plugin.\n\n" " The plugin will be detached from the audio, video and DVB\n" " devices. Other programs or plugins can use them now.\n", "ATTA <-d display> <-a audio> <-p pass>\n" " Attach plugin.\n\n" " Attach the plugin to audio, video and DVB devices. Use:\n" " -d display\tdisplay of x11 server (fe. :0.0)\n" " -a audio\taudio device (fe. alsa: hw:0,0 oss: /dev/dsp)\n" " -p pass\t\taudio device for pass-through (hw:0,1 or /dev/dsp1)\n", "PRIM \n" " Make the primary device.\n\n" " is the number of device. Without number softhddevice becomes\n" " the primary device. If becoming primary, the plugin is attached\n" " to the devices. 
If loosing primary, the plugin is detached from\n" " the devices.", "HOTK key\n" " Execute hotkey.\n\n" " key is the hotkey number, following are supported:\n" " 10: disable audio pass-through\n" " 11: enable audio pass-through\n" " 12: toggle audio pass-through\n" " 13: decrease audio delay by 10ms\n" " 14: increase audio delay by 10ms\n" " 15: toggle ac3 mixdown\n" " 20: disable fullscreen\n\040 21: enable fullscreen\n" " 22: toggle fullscreen\n" " 23: disable auto-crop\n\040 24: enable auto-crop\n" " 25: toggle auto-crop\n" " 30: stretch 4:3 to display\n\040 31: pillar box 4:3 in display\n" " 32: center cut-out 4:3 to display\n" " 39: rotate 4:3 to display zoom mode\n" " 40: stretch other aspect ratios to display\n" " 41: letter box other aspect ratios in display\n" " 42: center cut-out other aspect ratios to display\n" " 49: rotate other aspect ratios to display zoom mode\n", "STAT\n" "\040 Display SuspendMode of the plugin.\n\n" " reply code is 910 + SuspendMode\n" " SUSPEND_EXTERNAL == -1 (909)\n" " NOT_SUSPENDED == 0 (910)\n" " SUSPEND_NORMAL == 1 (911)\n" " SUSPEND_DETACHED == 2 (912)\n", "3DOF\n" "\040 3D OSD off.\n", "3DTB\n" "\040 3D OSD Top and Bottom.\n", "3DSB\n" "\040 3D OSD Side by Side.\n", "RAIS\n" "\040 Raise softhddevice window\n\n" " If Xserver is not started by softhddevice, the window which\n" " contains the softhddevice frontend will be raised to the front.\n", NULL }; /** ** Return SVDRP commands help pages. ** ** return a pointer to a list of help strings for all of the plugin's ** SVDRP commands. */ const char **cPluginSoftHdDevice::SVDRPHelpPages(void) { return SVDRPHelpText; } /** ** Handle SVDRP commands. 
**
**	@param command		SVDRP command
**	@param option		all command arguments
**	@param reply_code	reply code
*/
cString cPluginSoftHdDevice::SVDRPCommand(const char *command,
    const char *option, __attribute__ ((unused)) int &reply_code)
{
    // report the current suspend state; reply code encodes the mode
    if (!strcasecmp(command, "STAT")) {
	reply_code = 910 + SuspendMode;
	switch (SuspendMode) {
	    case SUSPEND_EXTERNAL:
		return "SuspendMode is SUSPEND_EXTERNAL";
	    case NOT_SUSPENDED:
		return "SuspendMode is NOT_SUSPENDED";
	    case SUSPEND_NORMAL:
		return "SuspendMode is SUSPEND_NORMAL";
	    case SUSPEND_DETACHED:
		return "SuspendMode is SUSPEND_DETACHED";
	}
    }
    // suspend the output, keeping or closing devices per setup
    if (!strcasecmp(command, "SUSP")) {
	if (cSoftHdControl::Player) {	// already suspended
	    return "SoftHdDevice already suspended";
	}
	if (SuspendMode != NOT_SUSPENDED) {
	    return "SoftHdDevice already detached";
	}
	cControl::Launch(new cSoftHdControl);
	cControl::Attach();
	Suspend(ConfigSuspendClose, ConfigSuspendClose, ConfigSuspendX11);
	SuspendMode = SUSPEND_NORMAL;
	return "SoftHdDevice is suspended";
    }
    // resume from a previous SUSP
    if (!strcasecmp(command, "RESU")) {
	if (SuspendMode == NOT_SUSPENDED) {
	    return "SoftHdDevice already resumed";
	}
	if (SuspendMode != SUSPEND_NORMAL) {
	    return "can't resume SoftHdDevice";
	}
	if (ShutdownHandler.GetUserInactiveTime()) {
	    ShutdownHandler.SetUserInactiveTimeout();
	}
	if (cSoftHdControl::Player) {	// suspended
	    cControl::Shutdown();	// not need, if not suspended
	}
	Resume();
	SuspendMode = NOT_SUSPENDED;
	return "SoftHdDevice is resumed";
    }
    // release the audio/video/DVB devices for other programs
    if (!strcasecmp(command, "DETA")) {
	if (SuspendMode == SUSPEND_DETACHED) {
	    return "SoftHdDevice already detached";
	}
	if (cSoftHdControl::Player) {	// already suspended
	    return "can't suspend SoftHdDevice already suspended";
	}
	cControl::Launch(new cSoftHdControl);
	cControl::Attach();
	Suspend(1, 1, 0);
	SuspendMode = SUSPEND_DETACHED;
	return "SoftHdDevice is detached";
    }
    // re-attach; options -d/-a/-p select display, audio, pass-through
    if (!strcasecmp(command, "ATTA")) {
	char *tmp;
	char *t;
	char *s;
	char *o;

	if (SuspendMode != SUSPEND_DETACHED) {
	    return "can't attach SoftHdDevice not detached";
	}
	if (!(tmp = strdup(option))) {
	    return "out of memory";
	}
	t = tmp;
	// options are accepted both as "-d arg" and "-darg"
	while ((s = strsep(&t, " \t\n\r"))) {
	    if (!strcmp(s, "-d")) {
		if (!(o = strsep(&t, " \t\n\r"))) {
		    free(tmp);
		    return "missing option argument";
		}
		free(ConfigX11Display);
		ConfigX11Display = strdup(o);
		X11DisplayName = ConfigX11Display;
	    } else if (!strncmp(s, "-d", 2)) {
		free(ConfigX11Display);
		ConfigX11Display = strdup(s + 2);
		X11DisplayName = ConfigX11Display;
	    } else if (!strcmp(s, "-a")) {
		if (!(o = strsep(&t, " \t\n\r"))) {
		    free(tmp);
		    return "missing option argument";
		}
		free(ConfigAudioDevice);
		ConfigAudioDevice = strdup(o);
		AudioSetDevice(ConfigAudioDevice);
	    } else if (!strncmp(s, "-a", 2)) {
		free(ConfigAudioDevice);
		ConfigAudioDevice = strdup(s + 2);
		AudioSetDevice(ConfigAudioDevice);
	    } else if (!strcmp(s, "-p")) {
		if (!(o = strsep(&t, " \t\n\r"))) {
		    free(tmp);
		    return "missing option argument";
		}
		free(ConfigPassthroughDevice);
		ConfigPassthroughDevice = strdup(o);
		AudioSetPassthroughDevice(ConfigPassthroughDevice);
	    } else if (!strncmp(s, "-p", 2)) {
		free(ConfigPassthroughDevice);
		ConfigPassthroughDevice = strdup(s + 2);
		AudioSetPassthroughDevice(ConfigPassthroughDevice);
	    } else if (*s) {
		free(tmp);
		return "unsupported option";
	    }
	}
	free(tmp);

	if (ShutdownHandler.GetUserInactiveTime()) {
	    ShutdownHandler.SetUserInactiveTimeout();
	}
	if (cSoftHdControl::Player) {	// suspended
	    cControl::Shutdown();	// not need, if not suspended
	}
	Resume();
	SuspendMode = NOT_SUSPENDED;
	return "SoftHdDevice is attached";
    }
    // execute a numeric hotkey
    if (!strcasecmp(command, "HOTK")) {
	int hotk;

	hotk = strtol(option, NULL, 0);
	HandleHotkey(hotk);
	return "hot-key executed";
    }
    // request primary device switch (done later in the main thread)
    if (!strcasecmp(command, "PRIM")) {
	int primary;

	primary = strtol(option, NULL, 0);
	if (!primary && MyDevice) {	// no number -> this device
	    primary = MyDevice->DeviceNumber() + 1;
	}
	dsyslog("[softhddev] switching primary device to %d\n", primary);
	DoMakePrimary = primary;
	return "switching primary device requested";
    }
    // 3D OSD modes
    if (!strcasecmp(command, "3DOF")) {
	VideoSetOsd3DMode(0);
	return "3d off";
    }
    if (!strcasecmp(command, "3DSB")) {
VideoSetOsd3DMode(1); return "3d sbs"; } if (!strcasecmp(command, "3DTB")) { VideoSetOsd3DMode(2); return "3d tb"; } if (!strcasecmp(command, "RAIS")) { if (!ConfigStartX11Server) { VideoRaiseWindow(); } else { return "Raise not possible"; } return "Window raised"; } return NULL; } VDRPLUGINCREATOR(cPluginSoftHdDevice); // Don't touch this! vdr-plugin-softhddevice/video.c0000644000175000017500000116726612504327116016434 0ustar tobiastobias/// /// @file video.c @brief Video module /// /// Copyright (c) 2009 - 2014 by Johns. All Rights Reserved. /// /// Contributor(s): /// /// License: AGPLv3 /// /// This program is free software: you can redistribute it and/or modify /// it under the terms of the GNU Affero General Public License as /// published by the Free Software Foundation, either version 3 of the /// License. /// /// This program is distributed in the hope that it will be useful, /// but WITHOUT ANY WARRANTY; without even the implied warranty of /// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the /// GNU Affero General Public License for more details. /// /// $Id: 08d0bca2cca51955a57d7a0c602556f892864898 $ ////////////////////////////////////////////////////////////////////////////// /// /// @defgroup Video The video module. /// /// This module contains all video rendering functions. /// /// @todo disable screen saver support /// /// Uses Xlib where it is needed for VA-API or vdpau. XCB is used for /// everything else. /// /// - X11 /// - OpenGL rendering /// - OpenGL rendering with GLX texture-from-pixmap /// - Xrender rendering /// /// @todo FIXME: use vaErrorStr for all VA-API errors. 
/// #define USE_XLIB_XCB ///< use xlib/xcb backend #define noUSE_SCREENSAVER ///< support disable screensaver #define USE_AUTOCROP ///< compile auto-crop support #define USE_GRAB ///< experimental grab code #define noUSE_GLX ///< outdated GLX code #define USE_DOUBLEBUFFER ///< use GLX double buffers //#define USE_VAAPI ///< enable vaapi support //#define USE_VDPAU ///< enable vdpau support //#define USE_BITMAP ///< use vdpau bitmap surface //#define AV_INFO ///< log a/v sync informations #ifndef AV_INFO_TIME #define AV_INFO_TIME (50 * 60) ///< a/v info every minute #endif #define USE_VIDEO_THREAD ///< run decoder in an own thread //#define USE_VIDEO_THREAD2 ///< run decoder+display in own threads #include #include #include #include #include #include #include #include #include #include #define _(str) gettext(str) ///< gettext shortcut #define _N(str) str ///< gettext_noop shortcut #ifdef USE_VIDEO_THREAD #ifndef __USE_GNU #define __USE_GNU #endif #include #include #include #ifndef HAVE_PTHREAD_NAME /// only available with newer glibc #define pthread_setname_np(thread, name) #endif #endif #ifdef USE_XLIB_XCB #include #include #include #include #include //#include #ifdef xcb_USE_GLX #include #endif //#include #ifdef USE_SCREENSAVER #include #include #endif //#include //#include //#include //#include //#include #include #ifdef XCB_ICCCM_NUM_WM_SIZE_HINTS_ELEMENTS #include #else // compatibility hack for old xcb-util /** * @brief Action on the _NET_WM_STATE property */ typedef enum { /* Remove/unset property */ XCB_EWMH_WM_STATE_REMOVE = 0, /* Add/set property */ XCB_EWMH_WM_STATE_ADD = 1, /* Toggle property */ XCB_EWMH_WM_STATE_TOGGLE = 2 } xcb_ewmh_wm_state_action_t; #endif #endif #ifdef USE_GLX #include // For GL_COLOR_BUFFER_BIT #include // only for gluErrorString #include #endif #ifdef USE_VAAPI #include #if VA_CHECK_VERSION(0,33,99) #include #endif #ifdef USE_GLX #include #endif #ifndef VA_SURFACE_ATTRIB_SETTABLE /// make source compatible with stable libva 
#define vaCreateSurfaces(d, f, w, h, s, ns, a, na) \ vaCreateSurfaces(d, w, h, f, ns, s) #endif #endif #ifdef USE_VDPAU #include #include #endif #include // support old ffmpeg versions <1.0 #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(55,18,102) #define AVCodecID CodecID #define AV_CODEC_ID_H263 CODEC_ID_H263 #define AV_CODEC_ID_H264 CODEC_ID_H264 #define AV_CODEC_ID_MPEG1VIDEO CODEC_ID_MPEG1VIDEO #define AV_CODEC_ID_MPEG2VIDEO CODEC_ID_MPEG2VIDEO #define AV_CODEC_ID_MPEG4 CODEC_ID_MPEG4 #define AV_CODEC_ID_VC1 CODEC_ID_VC1 #define AV_CODEC_ID_WMV3 CODEC_ID_WMV3 #endif #include #include #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(54,86,100) /// /// ffmpeg version 1.1.1 calls get_format with zero width and height /// for H264 codecs. /// since version 1.1.3 get_format is called twice. /// ffmpeg 1.2 still buggy /// #define FFMPEG_BUG1_WORKAROUND ///< get_format bug workaround #endif #include "iatomic.h" // portable atomic_t #include "misc.h" #include "video.h" #include "audio.h" #ifdef USE_XLIB_XCB //---------------------------------------------------------------------------- // Declarations //---------------------------------------------------------------------------- /// /// Video resolutions selector. /// typedef enum _video_resolutions_ { VideoResolution576i, ///< ...x576 interlaced VideoResolution720p, ///< ...x720 progressive VideoResolutionFake1080i, ///< 1280x1080 1440x1080 interlaced VideoResolution1080i, ///< 1920x1080 interlaced VideoResolutionMax ///< number of resolution indexs } VideoResolutions; /// /// Video deinterlace modes. /// typedef enum _video_deinterlace_modes_ { VideoDeinterlaceBob, ///< bob deinterlace VideoDeinterlaceWeave, ///< weave deinterlace VideoDeinterlaceTemporal, ///< temporal deinterlace VideoDeinterlaceTemporalSpatial, ///< temporal spatial deinterlace VideoDeinterlaceSoftBob, ///< software bob deinterlace VideoDeinterlaceSoftSpatial, ///< software spatial deinterlace } VideoDeinterlaceModes; /// /// Video scaleing modes. 
///
typedef enum _video_scaling_modes_
{
    VideoScalingNormal,			///< normal scaling
    VideoScalingFast,			///< fastest scaling
    VideoScalingHQ,			///< high quality scaling
    VideoScalingAnamorphic,		///< anamorphic scaling
} VideoScalingModes;

///
///	Video zoom modes.
///
typedef enum _video_zoom_modes_
{
    VideoNormal,			///< normal
    VideoStretch,			///< stretch to all edges
    VideoCenterCutOut,			///< center and cut out
    VideoAnamorphic,			///< anamorphic scaled (unsupported)
} VideoZoomModes;

///
///	Video color space conversions.
///
typedef enum _video_color_space_
{
    VideoColorSpaceNone,		///< no conversion
    VideoColorSpaceBt601,		///< ITU.BT-601 Y'CbCr
    VideoColorSpaceBt709,		///< ITU.BT-709 HDTV Y'CbCr
    VideoColorSpaceSmpte240		///< SMPTE-240M Y'PbPr
} VideoColorSpace;

///
///	Video output module structure and typedef.
///
///	Virtual function table filled in by each backend (VA-API, VDPAU,
///	noop, ...); the active backend is selected through VideoUsedModule.
///
typedef struct _video_module_
{
    const char *Name;			///< video output module name
    char Enabled;			///< flag output module enabled

    /// allocate new video hw decoder
    VideoHwDecoder *(*const NewHwDecoder)(VideoStream *);
    void (*const DelHwDecoder) (VideoHwDecoder *);
    unsigned (*const GetSurface) (VideoHwDecoder *,
	const AVCodecContext *);
    void (*const ReleaseSurface) (VideoHwDecoder *, unsigned);
    enum PixelFormat (*const get_format) (VideoHwDecoder *,
	AVCodecContext *, const enum PixelFormat *);
    void (*const RenderFrame) (VideoHwDecoder *, const AVCodecContext *,
	const AVFrame *);
    void *(*const GetHwAccelContext)(VideoHwDecoder *);
    void (*const SetClock) (VideoHwDecoder *, int64_t);
    int64_t(*const GetClock) (const VideoHwDecoder *);
    void (*const SetClosing) (const VideoHwDecoder *);
    void (*const ResetStart) (const VideoHwDecoder *);
    void (*const SetTrickSpeed) (const VideoHwDecoder *, int);
    uint8_t *(*const GrabOutput)(int *, int *, int *);
    void (*const GetStats) (VideoHwDecoder *, int *, int *, int *, int *);
    void (*const SetBackground) (uint32_t);
    void (*const SetVideoMode) (void);
    void (*const ResetAutoCrop) (void);

    /// module display handler thread
    void (*const DisplayHandlerThread) (void);

    void (*const OsdClear) (void);	///< clear OSD
    /// draw OSD ARGB area
    void (*const OsdDrawARGB) (int, int, int, int, const uint8_t *);
    void (*const OsdInit) (int, int);	///< initialize OSD
    void (*const OsdExit) (void);	///< cleanup OSD

    int (*const Init) (const char *);	///< initialize video output module
    void (*const Exit) (void);		///< cleanup video output module
} VideoModule;

//----------------------------------------------------------------------------
//	Defines
//----------------------------------------------------------------------------

#define CODEC_SURFACES_MAX	31	///< maximal of surfaces
#define CODEC_SURFACES_DEFAULT	21	///< default of surfaces
// FIXME: video-xvba only supports 14
#define xCODEC_SURFACES_DEFAULT	14	///< default of surfaces

#define CODEC_SURFACES_MPEG2	3	///< 1 decode, up to  2 references
#define CODEC_SURFACES_MPEG4	3	///< 1 decode, up to  2 references
#define CODEC_SURFACES_H264	21	///< 1 decode, up to 20 references
#define CODEC_SURFACES_VC1	3	///< 1 decode, up to  2 references

#define VIDEO_SURFACES_MAX	4	///< video output surfaces for queue
#define OUTPUT_SURFACES_MAX	4	///< output surfaces for flip page

//----------------------------------------------------------------------------
//	Variables
//----------------------------------------------------------------------------

char VideoIgnoreRepeatPict;		///< disable repeat pict warning

static const char *VideoDriverName;	///< video output device
static Display *XlibDisplay;		///< Xlib X11 display
static xcb_connection_t *Connection;	///< xcb connection
static xcb_colormap_t VideoColormap;	///< video colormap
static xcb_window_t VideoWindow;	///< video window
static xcb_screen_t const *VideoScreen;	///< video screen
static uint32_t VideoBlankTick;		///< blank cursor timer
static xcb_pixmap_t VideoCursorPixmap;	///< blank cursor pixmap
static xcb_cursor_t VideoBlankCursor;	///< empty invisible cursor

static int VideoWindowX;		///< video output window x coordinate
static int VideoWindowY;		///< video output window y coordinate
static unsigned VideoWindowWidth;	///< video output window width
static unsigned VideoWindowHeight;	///< video output window height

static const VideoModule NoopModule;	///< forward definition of noop module

    /// selected video module
static const VideoModule *VideoUsedModule = &NoopModule;

signed char VideoHardwareDecoder = -1;	///< flag use hardware decoder

static char VideoSurfaceModesChanged;	///< flag surface modes changed

    /// flag use transparent OSD.
static const char VideoTransparentOsd = 1;

static uint32_t VideoBackground;	///< video background color
static char VideoStudioLevels;		///< flag use studio levels

    /// Default deinterlace mode.
static VideoDeinterlaceModes VideoDeinterlace[VideoResolutionMax];

    /// Default number of deinterlace surfaces
static const int VideoDeinterlaceSurfaces = 4;

    /// Default skip chroma deinterlace flag (VDPAU only).
static char VideoSkipChromaDeinterlace[VideoResolutionMax];

    /// Default inverse telecine flag (VDPAU only).
static char VideoInverseTelecine[VideoResolutionMax];

    /// Default amount of noise reduction algorithm to apply (0 .. 1000).
static int VideoDenoise[VideoResolutionMax];

    /// Default amount of sharpening, or blurring, to apply (-1000 .. 1000).
static int VideoSharpen[VideoResolutionMax];

    /// Default cut top and bottom in pixels
static int VideoCutTopBottom[VideoResolutionMax];

    /// Default cut left and right in pixels
static int VideoCutLeftRight[VideoResolutionMax];

    /// Color space ITU-R BT.601, ITU-R BT.709, ...
static const VideoColorSpace VideoColorSpaces[VideoResolutionMax] = {
    VideoColorSpaceBt601, VideoColorSpaceBt709, VideoColorSpaceBt709,
    VideoColorSpaceBt709
};

    /// Default scaling mode
static VideoScalingModes VideoScaling[VideoResolutionMax];

    /// Default audio/video delay
int VideoAudioDelay;

    /// Default zoom mode for 4:3
static VideoZoomModes Video4to3ZoomMode;

    /// Default zoom mode for 16:9 and others
static VideoZoomModes VideoOtherZoomMode;

static char Video60HzMode;		///< handle 60hz displays
static char VideoSoftStartSync;		///< soft start sync audio/video
static const int VideoSoftStartFrames = 100;	///< soft start frames
static char VideoShowBlackPicture;	///< flag show black picture

static xcb_atom_t WmDeleteWindowAtom;	///< WM delete message atom
static xcb_atom_t NetWmState;		///< wm-state message atom
static xcb_atom_t NetWmStateFullscreen;	///< fullscreen wm-state message atom

#ifdef DEBUG
extern uint32_t VideoSwitch;		///< ticks for channel switch
#endif
extern void AudioVideoReady(int64_t);	///< tell audio video is ready

#ifdef USE_VIDEO_THREAD

static pthread_t VideoThread;		///< video decode thread
static pthread_cond_t VideoWakeupCond;	///< wakeup condition variable
static pthread_mutex_t VideoMutex;	///< video condition mutex
static pthread_mutex_t VideoLockMutex;	///< video lock mutex

#endif

#ifdef USE_VIDEO_THREAD2

static pthread_t VideoDisplayThread;	///< video decode thread
// NOTE(review): VideoWakeupCond is also defined in the USE_VIDEO_THREAD
// section above; the two #ifdef blocks appear to be mutually exclusive
// build variants -- confirm before enabling both.
static pthread_cond_t VideoWakeupCond;	///< wakeup condition variable
static pthread_mutex_t VideoDisplayMutex;	///< video condition mutex
static pthread_mutex_t VideoDisplayLockMutex;	///< video lock mutex

#endif

static int OsdConfigWidth;		///< osd configured width
static int OsdConfigHeight;		///< osd configured height
static char OsdShown;			///< flag show osd
static char Osd3DMode;			///< 3D OSD mode
static int OsdWidth;			///< osd width
static int OsdHeight;			///< osd height
static int OsdDirtyX;			///< osd dirty area x
static int OsdDirtyY;			///< osd dirty area y
static int
OsdDirtyWidth;			///< osd dirty area width
static int OsdDirtyHeight;		///< osd dirty area height

static int64_t VideoDeltaPTS;		///< FIXME: fix pts

//----------------------------------------------------------------------------
//	Common Functions
//----------------------------------------------------------------------------

static void VideoThreadLock(void);	///< lock video thread
static void VideoThreadUnlock(void);	///< unlock video thread
static void VideoThreadExit(void);	///< exit/kill video thread

///
///	Update video pts.
///
///	@param pts_p		pointer to pts
///	@param interlaced	interlaced flag (frame isn't right)
///	@param video_ctx	ffmpeg video codec context
///	@param frame		frame to display
///
///	@note frame->interlaced_frame can't be used for interlace detection
///
static void VideoSetPts(int64_t * pts_p, int interlaced,
    const AVCodecContext * video_ctx, const AVFrame * frame)
{
    int64_t pts;
    int duration;

    //
    //	Get duration for this frame.
    //	FIXME: using framerate as workaround for av_frame_get_pkt_duration
    //
#if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(56,13,100)
    // version for older ffmpeg without framerate
    if (video_ctx->time_base.num && video_ctx->time_base.den) {
	// frame duration in ms derived from the codec time base
	duration =
	    (video_ctx->ticks_per_frame * 1000 * video_ctx->time_base.num) /
	    video_ctx->time_base.den;
    } else {
	duration = interlaced ? 40 : 20;	// 50Hz -> 20ms default
    }
    Debug(4, "video: %d/%d %" PRIx64 " -> %d\n", video_ctx->time_base.den,
	video_ctx->time_base.num, av_frame_get_pkt_duration(frame), duration);
#else
    if (video_ctx->framerate.num && video_ctx->framerate.den) {
	duration = 1000 * video_ctx->framerate.den / video_ctx->framerate.num;
    } else {
	duration = interlaced ? 40 : 20;	// 50Hz -> 20ms default
    }
    Debug(4, "video: %d/%d %" PRIx64 " -> %d\n", video_ctx->framerate.den,
	video_ctx->framerate.num, av_frame_get_pkt_duration(frame), duration);
#endif

    // update video clock: PTS runs in 90kHz ticks, duration is in ms
    if (*pts_p != (int64_t) AV_NOPTS_VALUE) {
	*pts_p += duration * 90;
	//Info("video: %s +pts\n", Timestamp2String(*pts_p));
    }
    //av_opt_ptr(avcodec_get_frame_class(), frame, "best_effort_timestamp");
    //pts = frame->best_effort_timestamp;
    pts = frame->pkt_pts;
    if (pts == (int64_t) AV_NOPTS_VALUE || !pts) {
	// libav: 0.8pre didn't set pts
	pts = frame->pkt_dts;
    }
    // libav: sets only pkt_dts which can be 0
    if (pts && pts != (int64_t) AV_NOPTS_VALUE) {
	// build a monotonic pts
	if (*pts_p != (int64_t) AV_NOPTS_VALUE) {
	    int64_t delta;

	    delta = pts - *pts_p;
	    // ignore negative jumps between -40ms and -600ms (in 90kHz
	    // ticks): treated as reordering jitter, only tracked in
	    // VideoDeltaPTS, the clock isn't moved backwards
	    if (delta > -600 * 90 && delta <= -40 * 90) {
		if (-delta > VideoDeltaPTS) {
		    VideoDeltaPTS = -delta;
		    Debug(4,
			"video: %#012" PRIx64 "->%#012" PRIx64 " delta%+4"
			PRId64 " pts\n", *pts_p, pts, pts - *pts_p);
		}
		return;
	    }
	} else {			// first new clock value
	    AudioVideoReady(pts);
	}
	if (*pts_p != pts) {
	    Debug(4,
		"video: %#012" PRIx64 "->%#012" PRIx64 " delta=%4" PRId64
		" pts\n", *pts_p, pts, pts - *pts_p);
	    *pts_p = pts;
	}
    }
}

///
///	Update output for new size or aspect ratio.
///
///	@param input_aspect_ratio	video stream aspect
///	@param input_width	video stream width in pixels
///	@param input_height	video stream height in pixels
///	@param resolution	resolution group (selects cut config)
///	@param video_x		video output window x
///	@param video_y		video output window y
///	@param video_width	video output window width
///	@param video_height	video output window height
///	@param[out] output_x	scaled output x
///	@param[out] output_y	scaled output y
///	@param[out] output_width	scaled output width
///	@param[out] output_height	scaled output height
///	@param[out] crop_x	source crop x
///	@param[out] crop_y	source crop y
///	@param[out] crop_width	source crop width
///	@param[out] crop_height	source crop height
///
static void VideoUpdateOutput(AVRational input_aspect_ratio, int input_width,
    int input_height, VideoResolutions resolution, int video_x, int video_y,
    int video_width, int video_height, int *output_x, int *output_y,
    int *output_width, int *output_height, int *crop_x, int *crop_y,
    int *crop_width, int *crop_height)
{
    AVRational display_aspect_ratio;
    AVRational tmp_ratio;

    if (!input_aspect_ratio.num || !input_aspect_ratio.den) {
	input_aspect_ratio.num = 1;
	input_aspect_ratio.den = 1;
	Debug(3, "video: aspect defaults to %d:%d\n", input_aspect_ratio.num,
	    input_aspect_ratio.den);
    }

    // reduce sample aspect * picture size to display aspect ratio
    av_reduce(&input_aspect_ratio.num, &input_aspect_ratio.den,
	input_width * input_aspect_ratio.num,
	input_height * input_aspect_ratio.den, 1024 * 1024);

    // InputWidth/Height can be zero = uninitialized
    if (!input_aspect_ratio.num || !input_aspect_ratio.den) {
	input_aspect_ratio.num = 1;
	input_aspect_ratio.den = 1;
    }

    // monitor pixel aspect from X11 screen dimensions (pixels / millimeters)
    display_aspect_ratio.num =
	VideoScreen->width_in_pixels * VideoScreen->height_in_millimeters;
    display_aspect_ratio.den =
	VideoScreen->height_in_pixels * VideoScreen->width_in_millimeters;

    display_aspect_ratio = av_mul_q(input_aspect_ratio, display_aspect_ratio);
    Debug(3, "video: aspect %d:%d\n", display_aspect_ratio.num,
	display_aspect_ratio.den);

    // default crop from the configured cut values for this resolution
    *crop_x = VideoCutLeftRight[resolution];
    *crop_y = VideoCutTopBottom[resolution];
    *crop_width = input_width - VideoCutLeftRight[resolution] * 2;
    *crop_height = input_height - VideoCutTopBottom[resolution] * 2;

    // FIXME: store different positions for the ratios
    tmp_ratio.num = 4;
    tmp_ratio.den = 3;
#ifdef DEBUG
    fprintf(stderr, "ratio: %d:%d %d:%d\n", input_aspect_ratio.num,
	input_aspect_ratio.den, display_aspect_ratio.num,
	display_aspect_ratio.den);
#endif
    // 4:3 content uses its own zoom mode, everything else the "other" mode
    if (!av_cmp_q(input_aspect_ratio, tmp_ratio)) {
	switch (Video4to3ZoomMode) {
	    case VideoNormal:
		goto normal;
	    case VideoStretch:
		goto stretch;
	    case VideoCenterCutOut:
		goto center_cut_out;
	    case VideoAnamorphic:
		// FIXME: rest should be done by hardware
		goto stretch;
	}
    }
    switch (VideoOtherZoomMode) {
	case VideoNormal:
	    goto normal;
	case VideoStretch:
	    goto stretch;
	case VideoCenterCutOut:
	    goto center_cut_out;
	case VideoAnamorphic:
	    // FIXME: rest should be done by hardware
	    goto stretch;
    }

  normal:
    // letter/pillar-box: fit inside window, keep aspect, center
    *output_x = video_x;
    *output_y = video_y;
    *output_width =
	(video_height * display_aspect_ratio.num + display_aspect_ratio.den -
	1) / display_aspect_ratio.den;
    *output_height =
	(video_width * display_aspect_ratio.den + display_aspect_ratio.num -
	1) / display_aspect_ratio.num;
    if (*output_width > video_width) {
	*output_width = video_width;
	*output_y += (video_height - *output_height) / 2;
    } else if (*output_height > video_height) {
	*output_height = video_height;
	*output_x += (video_width - *output_width) / 2;
    }
    Debug(3, "video: aspect output %dx%d%+d%+d\n", *output_width,
	*output_height, *output_x, *output_y);
    return;

  stretch:
    // fill the whole window, ignore aspect
    *output_x = video_x;
    *output_y = video_y;
    *output_width = video_width;
    *output_height = video_height;
    Debug(3, "video: stretch output %dx%d%+d%+d\n", *output_width,
	*output_height, *output_x, *output_y);
    return;

  center_cut_out:
    // fill the window and cut away source borders instead of scaling
    *output_x = video_x;
    *output_y = video_y;
    *output_height = video_height;
    *output_width = video_width;

    *crop_width =
	(video_height * display_aspect_ratio.num + display_aspect_ratio.den -
	1) / display_aspect_ratio.den;
    *crop_height =
	(video_width * display_aspect_ratio.den + display_aspect_ratio.num -
	1) / display_aspect_ratio.num;

    // look which side must be cut
    if (*crop_width > video_width) {
	int tmp;

	*crop_height = input_height - VideoCutTopBottom[resolution] * 2;

	// adjust scaling
	tmp = ((*crop_width - video_width) * input_width) / (2 * video_width);
	// FIXME: round failure?
	if (tmp > *crop_x) {
	    *crop_x = tmp;
	}
	*crop_width = input_width - *crop_x * 2;
    } else if (*crop_height > video_height) {
	int tmp;

	*crop_width = input_width - VideoCutLeftRight[resolution] * 2;

	// adjust scaling
	tmp = ((*crop_height - video_height) * input_height)
	    / (2 * video_height);
	// FIXME: round failure?
	if (tmp > *crop_y) {
	    *crop_y = tmp;
	}
	*crop_height = input_height - *crop_y * 2;
    } else {
	*crop_width = input_width - VideoCutLeftRight[resolution] * 2;
	*crop_height = input_height - VideoCutTopBottom[resolution] * 2;
    }
    Debug(3, "video: aspect crop %dx%d%+d%+d\n", *crop_width, *crop_height,
	*crop_x, *crop_y);
    return;
}

//----------------------------------------------------------------------------
//	GLX
//----------------------------------------------------------------------------

#ifdef USE_GLX

static int GlxEnabled;			///< use GLX
static int GlxVSyncEnabled;		///< enable/disable v-sync
static GLXContext GlxSharedContext;	///< shared gl context
static GLXContext GlxContext;		///< our gl context

#ifdef USE_VIDEO_THREAD
static GLXContext GlxThreadContext;	///< our gl context for the thread
#endif

static XVisualInfo *GlxVisualInfo;	///< our gl visual

static GLuint OsdGlTextures[2];		///< gl texture for OSD
static int OsdIndex;			///< index into OsdGlTextures

///
///	GLX extension functions
///@{
#ifdef GLX_MESA_swap_control
static PFNGLXSWAPINTERVALMESAPROC GlxSwapIntervalMESA;
#endif
#ifdef GLX_SGI_video_sync
static PFNGLXGETVIDEOSYNCSGIPROC GlxGetVideoSyncSGI;
#endif
#ifdef GLX_SGI_swap_control
static PFNGLXSWAPINTERVALSGIPROC GlxSwapIntervalSGI;
#endif
///@}

///
///	GLX check error.
///
static void GlxCheck(void)
{
    GLenum err;

    if ((err = glGetError()) != GL_NO_ERROR) {
	Debug(3, "video/glx: error %d '%s'\n", err, gluErrorString(err));
    }
}

///
///	GLX check if a GLX extension is supported.
/// /// @param ext extension to query /// @returns true if supported, false otherwise /// static int GlxIsExtensionSupported(const char *ext) { const char *extensions; if ((extensions = glXQueryExtensionsString(XlibDisplay, DefaultScreen(XlibDisplay)))) { const char *s; int l; s = strstr(extensions, ext); l = strlen(ext); return s && (s[l] == ' ' || s[l] == '\0'); } return 0; } /// /// Setup GLX decoder /// /// @param width input video textures width /// @param height input video textures height /// @param[OUT] textures created and prepared textures /// static void GlxSetupDecoder(int width, int height, GLuint * textures) { int i; glEnable(GL_TEXTURE_2D); // create 2d texture glGenTextures(2, textures); GlxCheck(); for (i = 0; i < 2; ++i) { glBindTexture(GL_TEXTURE_2D, textures[i]); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); glPixelStorei(GL_UNPACK_ALIGNMENT, 4); glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, width, height, 0, GL_BGRA, GL_UNSIGNED_BYTE, NULL); glBindTexture(GL_TEXTURE_2D, 0); } glDisable(GL_TEXTURE_2D); GlxCheck(); } /// /// Render texture. /// /// @param texture 2d texture /// @param x window x /// @param y window y /// @param width window width /// @param height window height /// static inline void GlxRenderTexture(GLuint texture, int x, int y, int width, int height) { glEnable(GL_TEXTURE_2D); glBindTexture(GL_TEXTURE_2D, texture); glColor4f(1.0f, 1.0f, 1.0f, 1.0f); // no color glBegin(GL_QUADS); { glTexCoord2f(1.0f, 1.0f); glVertex2i(x + width, y + height); glTexCoord2f(0.0f, 1.0f); glVertex2i(x, y + height); glTexCoord2f(0.0f, 0.0f); glVertex2i(x, y); glTexCoord2f(1.0f, 0.0f); glVertex2i(x + width, y); } glEnd(); glBindTexture(GL_TEXTURE_2D, 0); glDisable(GL_TEXTURE_2D); } /// /// Upload OSD texture. 
/// /// @param x x coordinate texture /// @param y y coordinate texture /// @param width argb image width /// @param height argb image height /// @param argb argb image /// static void GlxUploadOsdTexture(int x, int y, int width, int height, const uint8_t * argb) { // FIXME: use other / faster uploads // ARB_pixelbuffer_object GL_PIXEL_UNPACK_BUFFER glBindBufferARB() // glMapBuffer() glUnmapBuffer() glEnable(GL_TEXTURE_2D); // upload 2d texture glBindTexture(GL_TEXTURE_2D, OsdGlTextures[OsdIndex]); glTexSubImage2D(GL_TEXTURE_2D, 0, x, y, width, height, GL_BGRA, GL_UNSIGNED_BYTE, argb); glBindTexture(GL_TEXTURE_2D, 0); glDisable(GL_TEXTURE_2D); } /// /// GLX initialize OSD. /// /// @param width osd width /// @param height osd height /// static void GlxOsdInit(int width, int height) { int i; #ifdef DEBUG if (!GlxEnabled) { Debug(3, "video/glx: %s called without glx enabled\n", __FUNCTION__); return; } #endif Debug(3, "video/glx: osd init context %p <-> %p\n", glXGetCurrentContext(), GlxContext); // // create a RGBA texture. // glEnable(GL_TEXTURE_2D); // create 2d texture(s) glGenTextures(2, OsdGlTextures); for (i = 0; i < 2; ++i) { glBindTexture(GL_TEXTURE_2D, OsdGlTextures[i]); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); glPixelStorei(GL_UNPACK_ALIGNMENT, 4); glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, width, height, 0, GL_BGRA, GL_UNSIGNED_BYTE, NULL); } glBindTexture(GL_TEXTURE_2D, 0); glDisable(GL_TEXTURE_2D); } /// /// GLX cleanup osd. /// static void GlxOsdExit(void) { if (OsdGlTextures[0]) { glDeleteTextures(2, OsdGlTextures); OsdGlTextures[0] = 0; OsdGlTextures[1] = 0; } } /// /// Upload ARGB image to texture. 
/// /// @param x x coordinate of image in osd texture /// @param y y coordinate of image in osd texture /// @param width width of image /// @param height height of image /// @param argb argb image /// /// @note looked by caller /// static void GlxOsdDrawARGB(int x, int y, int width, int height, const uint8_t * argb) { #ifdef DEBUG uint32_t start; uint32_t end; #endif #ifdef DEBUG if (!GlxEnabled) { Debug(3, "video/glx: %s called without glx enabled\n", __FUNCTION__); return; } start = GetMsTicks(); Debug(3, "video/glx: osd context %p <-> %p\n", glXGetCurrentContext(), GlxContext); #endif // set glx context if (!glXMakeCurrent(XlibDisplay, VideoWindow, GlxContext)) { Error(_("video/glx: can't make glx context current\n")); return; } GlxUploadOsdTexture(x, y, width, height, argb); glXMakeCurrent(XlibDisplay, None, NULL); #ifdef DEBUG end = GetMsTicks(); Debug(3, "video/glx: osd upload %dx%d%+d%+d %dms %d\n", width, height, x, y, end - start, width * height * 4); #endif } /// /// Clear OSD texture. /// /// @note looked by caller /// static void GlxOsdClear(void) { void *texbuf; #ifdef DEBUG if (!GlxEnabled) { Debug(3, "video/glx: %s called without glx enabled\n", __FUNCTION__); return; } Debug(3, "video/glx: osd context %p <-> %p\n", glXGetCurrentContext(), GlxContext); #endif // FIXME: any opengl function to clear an area? // FIXME: if not; use zero buffer // FIXME: if not; use dirty area // set glx context if (!glXMakeCurrent(XlibDisplay, VideoWindow, GlxContext)) { Error(_("video/glx: can't make glx context current\n")); return; } texbuf = calloc(OsdWidth * OsdHeight, 4); GlxUploadOsdTexture(0, 0, OsdWidth, OsdHeight, texbuf); glXMakeCurrent(XlibDisplay, None, NULL); free(texbuf); } /// /// Setup GLX window. 
/// /// @param window xcb window id /// @param width window width /// @param height window height /// @param context GLX context /// static void GlxSetupWindow(xcb_window_t window, int width, int height, GLXContext context) { #ifdef DEBUG uint32_t start; uint32_t end; int i; unsigned count; #endif Debug(3, "video/glx: %s %x %dx%d context:%p", __FUNCTION__, window, width, height, context); // set glx context if (!glXMakeCurrent(XlibDisplay, window, context)) { Error(_("video/glx: can't make glx context current\n")); GlxEnabled = 0; return; } Debug(3, "video/glx: ok\n"); #ifdef DEBUG // check if v-sync is working correct end = GetMsTicks(); for (i = 0; i < 10; ++i) { start = end; glClear(GL_COLOR_BUFFER_BIT); glXSwapBuffers(XlibDisplay, window); end = GetMsTicks(); GlxGetVideoSyncSGI(&count); Debug(3, "video/glx: %5d frame rate %dms\n", count, end - start); // nvidia can queue 5 swaps if (i > 5 && (end - start) < 15) { Warning(_("video/glx: no v-sync\n")); } } #endif // viewpoint GlxCheck(); glViewport(0, 0, width, height); glDepthRange(-1.0, 1.0); glClearColor(0.0f, 0.0f, 0.0f, 0.0f); glColor3f(1.0f, 1.0f, 1.0f); glClearDepth(1.0); GlxCheck(); glMatrixMode(GL_PROJECTION); glLoadIdentity(); glOrtho(0.0, width, height, 0.0, -1.0, 1.0); GlxCheck(); glMatrixMode(GL_MODELVIEW); glLoadIdentity(); glDisable(GL_DEPTH_TEST); // setup 2d drawing glDepthMask(GL_FALSE); glDisable(GL_CULL_FACE); #ifdef USE_DOUBLEBUFFER glDrawBuffer(GL_BACK); #else glDrawBuffer(GL_FRONT); #endif glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE); glEnable(GL_BLEND); glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); #ifdef DEBUG #ifdef USE_DOUBLEBUFFER glDrawBuffer(GL_FRONT); glClearColor(1.0f, 0.0f, 1.0f, 1.0f); glClear(GL_COLOR_BUFFER_BIT); glDrawBuffer(GL_BACK); #endif #endif // clear glClearColor(0.0f, 0.0f, 0.0f, 1.0f); // intial background color glClear(GL_COLOR_BUFFER_BIT); #ifdef DEBUG glClearColor(1.0f, 1.0f, 0.0f, 1.0f); // background color #endif GlxCheck(); } /// /// 
Initialize GLX. /// static void GlxInit(void) { static GLint visual_attr[] = { GLX_RGBA, GLX_RED_SIZE, 8, GLX_GREEN_SIZE, 8, GLX_BLUE_SIZE, 8, #ifdef USE_DOUBLEBUFFER GLX_DOUBLEBUFFER, #endif None }; XVisualInfo *vi; GLXContext context; int major; int minor; int glx_GLX_EXT_swap_control; int glx_GLX_MESA_swap_control; int glx_GLX_SGI_swap_control; int glx_GLX_SGI_video_sync; if (!glXQueryVersion(XlibDisplay, &major, &minor)) { Error(_("video/glx: no GLX support\n")); GlxEnabled = 0; return; } Info(_("video/glx: glx version %d.%d\n"), major, minor); // // check which extension are supported // glx_GLX_EXT_swap_control = GlxIsExtensionSupported("GLX_EXT_swap_control"); glx_GLX_MESA_swap_control = GlxIsExtensionSupported("GLX_MESA_swap_control"); glx_GLX_SGI_swap_control = GlxIsExtensionSupported("GLX_SGI_swap_control"); glx_GLX_SGI_video_sync = GlxIsExtensionSupported("GLX_SGI_video_sync"); #ifdef GLX_MESA_swap_control if (glx_GLX_MESA_swap_control) { GlxSwapIntervalMESA = (PFNGLXSWAPINTERVALMESAPROC) glXGetProcAddress((const GLubyte *)"glXSwapIntervalMESA"); } Debug(3, "video/glx: GlxSwapIntervalMESA=%p\n", GlxSwapIntervalMESA); #endif #ifdef GLX_SGI_swap_control if (glx_GLX_SGI_swap_control) { GlxSwapIntervalSGI = (PFNGLXSWAPINTERVALSGIPROC) glXGetProcAddress((const GLubyte *)"glXSwapIntervalSGI"); } Debug(3, "video/glx: GlxSwapIntervalSGI=%p\n", GlxSwapIntervalSGI); #endif #ifdef GLX_SGI_video_sync if (glx_GLX_SGI_video_sync) { GlxGetVideoSyncSGI = (PFNGLXGETVIDEOSYNCSGIPROC) glXGetProcAddress((const GLubyte *)"glXGetVideoSyncSGI"); } Debug(3, "video/glx: GlxGetVideoSyncSGI=%p\n", GlxGetVideoSyncSGI); #endif // glXGetVideoSyncSGI glXWaitVideoSyncSGI #if 0 // FIXME: use xcb: xcb_glx_create_context #endif // create glx context glXMakeCurrent(XlibDisplay, None, NULL); vi = glXChooseVisual(XlibDisplay, DefaultScreen(XlibDisplay), visual_attr); if (!vi) { Error(_("video/glx: can't get a RGB visual\n")); GlxEnabled = 0; return; } if (!vi->visual) { Error(_("video/glx: 
no valid visual found\n")); GlxEnabled = 0; return; } if (vi->bits_per_rgb < 8) { Error(_("video/glx: need atleast 8-bits per RGB\n")); GlxEnabled = 0; return; } context = glXCreateContext(XlibDisplay, vi, NULL, GL_TRUE); if (!context) { Error(_("video/glx: can't create glx context\n")); GlxEnabled = 0; return; } GlxSharedContext = context; context = glXCreateContext(XlibDisplay, vi, GlxSharedContext, GL_TRUE); if (!context) { Error(_("video/glx: can't create glx context\n")); GlxEnabled = 0; glXDestroyContext(XlibDisplay, GlxSharedContext); GlxSharedContext = 0; return; } GlxContext = context; GlxVisualInfo = vi; Debug(3, "video/glx: visual %#02x depth %u\n", (unsigned)vi->visualid, vi->depth); // // query default v-sync state // if (glx_GLX_EXT_swap_control) { unsigned tmp; tmp = -1; glXQueryDrawable(XlibDisplay, DefaultRootWindow(XlibDisplay), GLX_SWAP_INTERVAL_EXT, &tmp); GlxCheck(); Debug(3, "video/glx: default v-sync is %d\n", tmp); } else { Debug(3, "video/glx: default v-sync is unknown\n"); } // // disable wait on v-sync // // FIXME: sleep before swap / busy waiting hardware // FIXME: 60hz lcd panel // FIXME: config: default, on, off #ifdef GLX_SGI_swap_control if (GlxVSyncEnabled < 0 && GlxSwapIntervalSGI) { if (GlxSwapIntervalSGI(0)) { GlxCheck(); Warning(_("video/glx: can't disable v-sync\n")); } else { Info(_("video/glx: v-sync disabled\n")); } } else #endif #ifdef GLX_MESA_swap_control if (GlxVSyncEnabled < 0 && GlxSwapIntervalMESA) { if (GlxSwapIntervalMESA(0)) { GlxCheck(); Warning(_("video/glx: can't disable v-sync\n")); } else { Info(_("video/glx: v-sync disabled\n")); } } #endif // // enable wait on v-sync // #ifdef GLX_SGI_swap_control if (GlxVSyncEnabled > 0 && GlxSwapIntervalMESA) { if (GlxSwapIntervalMESA(1)) { GlxCheck(); Warning(_("video/glx: can't enable v-sync\n")); } else { Info(_("video/glx: v-sync enabled\n")); } } else #endif #ifdef GLX_MESA_swap_control if (GlxVSyncEnabled > 0 && GlxSwapIntervalSGI) { if (GlxSwapIntervalSGI(1)) { 
GlxCheck(); Warning(_("video/glx: can't enable v-sync\n")); } else { Info(_("video/glx: v-sync enabled\n")); } } #endif } /// /// Cleanup GLX. /// static void GlxExit(void) { Debug(3, "video/glx: %s\n", __FUNCTION__); glFinish(); // must destroy glx if (glXGetCurrentContext() == GlxContext) { // if currently used, set to none glXMakeCurrent(XlibDisplay, None, NULL); } if (GlxSharedContext) { glXDestroyContext(XlibDisplay, GlxSharedContext); } if (GlxContext) { glXDestroyContext(XlibDisplay, GlxContext); } if (GlxThreadContext) { glXDestroyContext(XlibDisplay, GlxThreadContext); } // FIXME: must free GlxVisualInfo } #endif //---------------------------------------------------------------------------- // common functions //---------------------------------------------------------------------------- /// /// Calculate resolution group. /// /// @param width video picture raw width /// @param height video picture raw height /// @param interlace flag interlaced video picture /// /// @note interlace isn't used yet and probably wrong set by caller. /// static VideoResolutions VideoResolutionGroup(int width, int height, __attribute__ ((unused)) int interlace) { if (height <= 576) { return VideoResolution576i; } if (height <= 720) { return VideoResolution720p; } if (height < 1080) { return VideoResolutionFake1080i; } if (width < 1920) { return VideoResolutionFake1080i; } return VideoResolution1080i; } //---------------------------------------------------------------------------- // auto-crop //---------------------------------------------------------------------------- /// /// auto-crop context structure and typedef. 
/// typedef struct _auto_crop_ctx_ { int X1; ///< detected left border int X2; ///< detected right border int Y1; ///< detected top border int Y2; ///< detected bottom border int Count; ///< counter to delay switch int State; ///< auto-crop state (0, 14, 16) } AutoCropCtx; #ifdef USE_AUTOCROP #define YBLACK 0x20 ///< below is black #define UVBLACK 0x80 ///< around is black #define M64 UINT64_C(0x0101010101010101) ///< 64bit multiplicator /// auto-crop percent of video width to ignore logos static const int AutoCropLogoIgnore = 24; static int AutoCropInterval; ///< auto-crop check interval static int AutoCropDelay; ///< auto-crop switch delay static int AutoCropTolerance; ///< auto-crop tolerance /// /// Detect black line Y. /// /// @param data Y plane pixel data /// @param length number of pixel to check /// @param stride offset of pixels /// /// @note 8 pixel are checked at once, all values must be 8 aligned /// static int AutoCropIsBlackLineY(const uint8_t * data, int length, int stride) { int n; int o; uint64_t r; const uint64_t *p; #ifdef DEBUG if ((size_t) data & 0x7 || stride & 0x7) { abort(); } #endif p = (const uint64_t *)data; n = length; // FIXME: can remove n o = stride / 8; r = 0UL; while (--n >= 0) { r |= *p; p += o; } // below YBLACK(0x20) is black return !(r & ~((YBLACK - 1) * M64)); } /// /// Auto detect black borders and crop them. /// /// @param autocrop auto-crop variables /// @param width frame width in pixel /// @param height frame height in pixel /// @param data frame planes data (Y, U, V) /// @param pitches frame planes pitches (Y, U, V) /// /// @note FIXME: can reduce the checked range, left, right crop isn't /// used yet. /// /// @note FIXME: only Y is checked, for black. 
/// static void AutoCropDetect(AutoCropCtx * autocrop, int width, int height, void *data[3], uint32_t pitches[3]) { const void *data_y; unsigned length_y; int x; int y; int x1; int x2; int y1; int y2; int logo_skip; // // ignore top+bottom 6 lines and left+right 8 pixels // #define SKIP_X 8 #define SKIP_Y 6 x1 = width - 1; x2 = 0; y1 = height - 1; y2 = 0; logo_skip = SKIP_X + (((width * AutoCropLogoIgnore) / 100 + 8) / 8) * 8; data_y = data[0]; length_y = pitches[0]; // // search top // for (y = SKIP_Y; y < y1; ++y) { if (!AutoCropIsBlackLineY(data_y + logo_skip + y * length_y, (width - 2 * logo_skip) / 8, 8)) { if (y == SKIP_Y) { y = 0; } y1 = y; break; } } // // search bottom // for (y = height - SKIP_Y - 1; y > y2; --y) { if (!AutoCropIsBlackLineY(data_y + logo_skip + y * length_y, (width - 2 * logo_skip) / 8, 8)) { if (y == height - SKIP_Y - 1) { y = height - 1; } y2 = y; break; } } // // search left // for (x = SKIP_X; x < x1; x += 8) { if (!AutoCropIsBlackLineY(data_y + x + SKIP_Y * length_y, height - 2 * SKIP_Y, length_y)) { if (x == SKIP_X) { x = 0; } x1 = x; break; } } // // search right // for (x = width - SKIP_X - 8; x > x2; x -= 8) { if (!AutoCropIsBlackLineY(data_y + x + SKIP_Y * length_y, height - 2 * SKIP_Y * 8, length_y)) { if (x == width - SKIP_X - 8) { x = width - 1; } x2 = x; break; } } if (0 && (y1 > SKIP_Y || x1 > SKIP_X)) { Debug(3, "video/autocrop: top=%d bottom=%d left=%d right=%d\n", y1, y2, x1, x2); } autocrop->X1 = x1; autocrop->X2 = x2; autocrop->Y1 = y1; autocrop->Y2 = y2; } #endif //---------------------------------------------------------------------------- // software - deinterlace //---------------------------------------------------------------------------- // FIXME: move general software deinterlace functions to here. 
//----------------------------------------------------------------------------
//	VA-API
//----------------------------------------------------------------------------

#ifdef USE_VAAPI

static char VaapiBuggyXvBA;		///< fix xvba-video bugs
static char VaapiBuggyVdpau;		///< fix libva-driver-vdpau bugs
static char VaapiBuggyIntel;		///< fix libva-driver-intel bugs

static VADisplay *VaDisplay;		///< VA-API display

static VAImage VaOsdImage = {
    .image_id = VA_INVALID_ID
};					///< osd VA-API image

static VASubpictureID VaOsdSubpicture = VA_INVALID_ID;	///< osd VA-API subpicture
static char VaapiUnscaledOsd;		///< unscaled osd supported

#if VA_CHECK_VERSION(0,33,99)
static char VaapiVideoProcessing;	///< supports video processing
#endif

    /// VA-API decoder typedef
typedef struct _vaapi_decoder_ VaapiDecoder;

///
///	VA-API decoder
///
struct _vaapi_decoder_
{
    VADisplay *VaDisplay;		///< VA-API display
    xcb_window_t Window;		///< output window

    int VideoX;				///< video base x coordinate
    int VideoY;				///< video base y coordinate
    int VideoWidth;			///< video base width
    int VideoHeight;			///< video base height

    int OutputX;			///< real video output x coordinate
    int OutputY;			///< real video output y coordinate
    int OutputWidth;			///< real video output width
    int OutputHeight;			///< real video output height

    /// flags for put surface for different resolutions groups
    unsigned SurfaceFlagsTable[VideoResolutionMax];

    enum PixelFormat PixFmt;		///< ffmpeg frame pixfmt
    int WrongInterlacedWarned;		///< warning about interlace flag issued
    int Interlaced;			///< ffmpeg interlaced flag
    int TopFieldFirst;			///< ffmpeg top field displayed first

    VAImage DeintImages[5];		///< deinterlace image buffers

    int GetPutImage;			///< flag get/put image can be used
    VAImage Image[1];			///< image buffer to update surface

    VAProfile Profile;			///< VA-API profile
    VAEntrypoint Entrypoint;		///< VA-API entrypoint
    struct vaapi_context VaapiContext[1];	///< ffmpeg VA-API context

    int SurfacesNeeded;			///< number of surface to request
    int SurfaceUsedN;			///< number of used surfaces
    /// used surface ids
    VASurfaceID SurfacesUsed[CODEC_SURFACES_MAX];
    int SurfaceFreeN;			///< number of free surfaces
    /// free surface ids
    VASurfaceID SurfacesFree[CODEC_SURFACES_MAX];

    int InputWidth;			///< video input width
    int InputHeight;			///< video input height
    AVRational InputAspect;		///< video input aspect ratio
    VideoResolutions Resolution;	///< resolution group

    int CropX;				///< video crop x
    int CropY;				///< video crop y
    int CropWidth;			///< video crop width
    int CropHeight;			///< video crop height

#ifdef USE_AUTOCROP
    AutoCropCtx AutoCrop[1];		///< auto-crop variables
#endif
#ifdef USE_GLX
    GLuint GlTextures[2];		///< gl texture for VA-API
    void *GlxSurfaces[2];		///< VA-API/GLX surface
#endif

    VASurfaceID BlackSurface;		///< empty black surface

    /// video surface ring buffer
    VASurfaceID SurfacesRb[VIDEO_SURFACES_MAX];
#ifdef VA_EXP
    VASurfaceID LastSurface;		///< last surface
#endif
    int SurfaceWrite;			///< write pointer
    int SurfaceRead;			///< read pointer
    atomic_t SurfacesFilled;		///< how many of the buffer is used

    int SurfaceField;			///< current displayed field
    int TrickSpeed;			///< current trick speed
    int TrickCounter;			///< current trick speed counter
    struct timespec FrameTime;		///< time of last display
    VideoStream *Stream;		///< video stream
    int Closing;			///< flag about closing current stream
    int SyncOnAudio;			///< flag sync to audio
    int64_t PTS;			///< video PTS clock

    int LastAVDiff;			///< last audio - video difference
    int SyncCounter;			///< counter to sync frames
    int StartCounter;			///< counter for video start
    int FramesDuped;			///< number of frames duplicated
    int FramesMissed;			///< number of frames missed
    int FramesDropped;			///< number of frames dropped
    int FrameCounter;			///< number of frames decoded
    int FramesDisplayed;		///< number of frames displayed
};

static VaapiDecoder *VaapiDecoders[1];	///< open decoder streams
static int VaapiDecoderN;		///< number of decoder streams

    /// forward display back surface
static void VaapiBlackSurface(VaapiDecoder *);

    /// forward destroy deinterlace images
static void VaapiDestroyDeinterlaceImages(VaapiDecoder *);

    /// forward definition release surface
static void VaapiReleaseSurface(VaapiDecoder *, VASurfaceID);

//----------------------------------------------------------------------------
//	VA-API Functions
//----------------------------------------------------------------------------

//----------------------------------------------------------------------------

///
///	Output video messages.
///
///	Reduce output.
///
///	@param level	message level (Error, Warning, Info, Debug, ...)
///	@param format	printf format string (NULL to flush messages)
///	@param ...	printf arguments
///
///	@returns true, if message shown
///
///	@todo FIXME: combine VdpauMessage and VaapiMessage
///
static int VaapiMessage(int level, const char *format, ...)
{
    if (SysLogLevel > level || DebugLevel > level) {
	// static state: de-duplicates a repeated message; the buffered
	// copy is flushed when a different format string arrives
	static const char *last_format;
	static char buf[256];
	va_list ap;

	va_start(ap, format);
	if (format != last_format) {	// don't repeat same message
	    if (buf[0]) {		// print last repeated message
		syslog(LOG_ERR, "%s", buf);
		buf[0] = '\0';
	    }

	    if (format) {
		last_format = format;
		vsyslog(LOG_ERR, format, ap);
	    }
	    va_end(ap);
	    return 1;
	}
	vsnprintf(buf, sizeof(buf), format, ap);
	va_end(ap);
    }
    return 0;
}

//	Surfaces -------------------------------------------------------------

///
///	Associate OSD with surface.
/// /// @param decoder VA-API decoder /// static void VaapiAssociate(VaapiDecoder * decoder) { int x; int y; int w; int h; if (VaOsdSubpicture == VA_INVALID_ID) { Warning(_("video/vaapi: no osd subpicture yet\n")); return; } x = 0; y = 0; w = VaOsdImage.width; h = VaOsdImage.height; // FIXME: associate only if osd is displayed if (VaapiUnscaledOsd) { if (decoder->SurfaceFreeN && vaAssociateSubpicture(VaDisplay, VaOsdSubpicture, decoder->SurfacesFree, decoder->SurfaceFreeN, x, y, w, h, 0, 0, VideoWindowWidth, VideoWindowHeight, VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't associate subpicture\n")); } if (decoder->SurfaceUsedN && vaAssociateSubpicture(VaDisplay, VaOsdSubpicture, decoder->SurfacesUsed, decoder->SurfaceUsedN, x, y, w, h, 0, 0, VideoWindowWidth, VideoWindowHeight, VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't associate subpicture\n")); } } else { if (decoder->SurfaceFreeN && vaAssociateSubpicture(VaDisplay, VaOsdSubpicture, decoder->SurfacesFree, decoder->SurfaceFreeN, x, y, w, h, decoder->CropX, decoder->CropY / 2, decoder->CropWidth, decoder->CropHeight, 0) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't associate subpicture\n")); } if (decoder->SurfaceUsedN && vaAssociateSubpicture(VaDisplay, VaOsdSubpicture, decoder->SurfacesUsed, decoder->SurfaceUsedN, x, y, w, h, decoder->CropX, decoder->CropY / 2, decoder->CropWidth, decoder->CropHeight, 0) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't associate subpicture\n")); } } } /// /// Deassociate OSD with surface. 
/// /// @param decoder VA-API decoder /// static void VaapiDeassociate(VaapiDecoder * decoder) { if (VaOsdSubpicture != VA_INVALID_ID) { if (decoder->SurfaceFreeN && vaDeassociateSubpicture(VaDisplay, VaOsdSubpicture, decoder->SurfacesFree, decoder->SurfaceFreeN) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't deassociate %d surfaces\n"), decoder->SurfaceFreeN); } if (decoder->SurfaceUsedN && vaDeassociateSubpicture(VaDisplay, VaOsdSubpicture, decoder->SurfacesUsed, decoder->SurfaceUsedN) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't deassociate %d surfaces\n"), decoder->SurfaceUsedN); } } } /// /// Create surfaces for VA-API decoder. /// /// @param decoder VA-API decoder /// @param width surface source/video width /// @param height surface source/video height /// static void VaapiCreateSurfaces(VaapiDecoder * decoder, int width, int height) { #ifdef DEBUG if (!decoder->SurfacesNeeded) { Error(_("video/vaapi: surface needed not set\n")); decoder->SurfacesNeeded = 3 + VIDEO_SURFACES_MAX; } #endif Debug(3, "video/vaapi: %s: %dx%d * %d\n", __FUNCTION__, width, height, decoder->SurfacesNeeded); decoder->SurfaceFreeN = decoder->SurfacesNeeded; // VA_RT_FORMAT_YUV420 VA_RT_FORMAT_YUV422 VA_RT_FORMAT_YUV444 if (vaCreateSurfaces(decoder->VaDisplay, VA_RT_FORMAT_YUV420, width, height, decoder->SurfacesFree, decoder->SurfaceFreeN, NULL, 0) != VA_STATUS_SUCCESS) { Fatal(_("video/vaapi: can't create %d surfaces\n"), decoder->SurfaceFreeN); // FIXME: write error handler / fallback } } /// /// Destroy surfaces of VA-API decoder. 
/// /// @param decoder VA-API decoder /// static void VaapiDestroySurfaces(VaapiDecoder * decoder) { Debug(3, "video/vaapi: %s:\n", __FUNCTION__); // // update OSD associate // VaapiDeassociate(decoder); if (vaDestroySurfaces(decoder->VaDisplay, decoder->SurfacesFree, decoder->SurfaceFreeN) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't destroy %d surfaces\n"), decoder->SurfaceFreeN); } decoder->SurfaceFreeN = 0; if (vaDestroySurfaces(decoder->VaDisplay, decoder->SurfacesUsed, decoder->SurfaceUsedN) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't destroy %d surfaces\n"), decoder->SurfaceUsedN); } decoder->SurfaceUsedN = 0; // FIXME surfaces used for output } /// /// Get a free surface. /// /// @param decoder VA-API decoder /// /// @returns the oldest free surface /// static VASurfaceID VaapiGetSurface0(VaapiDecoder * decoder) { VASurfaceID surface; VASurfaceStatus status; int i; // try to use oldest surface for (i = 0; i < decoder->SurfaceFreeN; ++i) { surface = decoder->SurfacesFree[i]; if (vaQuerySurfaceStatus(decoder->VaDisplay, surface, &status) != VA_STATUS_SUCCESS) { // this fails with XvBA und mpeg softdecoder if (!VaapiBuggyXvBA) { Error(_("video/vaapi: vaQuerySurface failed\n")); } status = VASurfaceReady; } // surface still in use, try next if (status != VASurfaceReady) { Debug(4, "video/vaapi: surface %#010x not ready: %d\n", surface, status); if (!VaapiBuggyVdpau || i < 1) { continue; } usleep(1 * 1000); } // copy remaining surfaces down decoder->SurfaceFreeN--; for (; i < decoder->SurfaceFreeN; ++i) { decoder->SurfacesFree[i] = decoder->SurfacesFree[i + 1]; } decoder->SurfacesFree[i] = VA_INVALID_ID; // save as used decoder->SurfacesUsed[decoder->SurfaceUsedN++] = surface; return surface; } Error(_("video/vaapi: out of surfaces\n")); return VA_INVALID_ID; } /// /// Release a surface. 
///
///	@param decoder	VA-API decoder
///	@param surface	surface no longer used
///
///	Moves a surface from the used list back to the free list.
///	Order of the used list is not preserved (swap-with-last removal).
///
static void VaapiReleaseSurface(VaapiDecoder * decoder, VASurfaceID surface)
{
    int i;

    for (i = 0; i < decoder->SurfaceUsedN; ++i) {
	if (decoder->SurfacesUsed[i] == surface) {
	    // no problem, with last used
	    decoder->SurfacesUsed[i] =
		decoder->SurfacesUsed[--decoder->SurfaceUsedN];
	    decoder->SurfacesFree[decoder->SurfaceFreeN++] = surface;
	    return;
	}
    }
    Error(_("video/vaapi: release surface %#010x, which is not in use\n"),
	surface);
}

// Init/Exit ------------------------------------------------------------

///
///	Debug VA-API decoder frames drop...
///
///	@param decoder	video hardware decoder
///
static void VaapiPrintFrames(const VaapiDecoder * decoder)
{
    Debug(3, "video/vaapi: %d missed, %d duped, %d dropped frames of %d,%d\n",
	decoder->FramesMissed, decoder->FramesDuped, decoder->FramesDropped,
	decoder->FrameCounter, decoder->FramesDisplayed);
#ifndef DEBUG
    (void)decoder;
#endif
}

///
///	Initialize surface flags.
///
///	Builds the per-resolution vaPutSurface flag table from the global
///	color-space, scaling and deinterlace configuration.
///
///	@param decoder	video hardware decoder
///
static void VaapiInitSurfaceFlags(VaapiDecoder * decoder)
{
    int i;

    for (i = 0; i < VideoResolutionMax; ++i) {
	decoder->SurfaceFlagsTable[i] = VA_CLEAR_DRAWABLE;
	// color space conversion none, ITU-R BT.601, ITU-R BT.709, ...
	switch (VideoColorSpaces[i]) {
	    case VideoColorSpaceNone:
		break;
	    case VideoColorSpaceBt601:
		decoder->SurfaceFlagsTable[i] |= VA_SRC_BT601;
		break;
	    case VideoColorSpaceBt709:
		decoder->SurfaceFlagsTable[i] |= VA_SRC_BT709;
		break;
	    case VideoColorSpaceSmpte240:
		decoder->SurfaceFlagsTable[i] |= VA_SRC_SMPTE_240;
		break;
	}

	// scaling flags FAST, HQ, NL_ANAMORPHIC
	switch (VideoScaling[i]) {
	    case VideoScalingNormal:
		decoder->SurfaceFlagsTable[i] |= VA_FILTER_SCALING_DEFAULT;
		break;
	    case VideoScalingFast:
		decoder->SurfaceFlagsTable[i] |= VA_FILTER_SCALING_FAST;
		break;
	    case VideoScalingHQ:
		// vdpau backend supports only VA_FILTER_SCALING_HQ
		// vdpau backend with advanced deinterlacer and my GT-210
		// is too slow
		decoder->SurfaceFlagsTable[i] |= VA_FILTER_SCALING_HQ;
		break;
	    case VideoScalingAnamorphic:
		// intel backend supports only VA_FILTER_SCALING_NL_ANAMORPHIC;
		// FIXME: Highlevel should display 4:3 as 16:9 to support this
		decoder->SurfaceFlagsTable[i] |=
		    VA_FILTER_SCALING_NL_ANAMORPHIC;
		break;
	}

	// deinterlace flags (not yet supported by libva)
	switch (VideoDeinterlace[i]) {
	    case VideoDeinterlaceBob:
		break;
	    case VideoDeinterlaceWeave:
		break;
	    case VideoDeinterlaceTemporal:
		//FIXME: private hack
		//decoder->SurfaceFlagsTable[i] |= 0x00002000;
		break;
	    case VideoDeinterlaceTemporalSpatial:
		//FIXME: private hack
		//decoder->SurfaceFlagsTable[i] |= 0x00006000;
		break;
	    default:
		break;
	}
    }
}

///
///	Allocate new VA-API decoder.
///
///	@returns a new prepared VA-API hardware decoder.
///
///	@param stream	video stream this decoder belongs to
///
static VaapiDecoder *VaapiNewHwDecoder(VideoStream * stream)
{
    VaapiDecoder *decoder;
    int i;

    // registry holds exactly one decoder (array size 1)
    if (VaapiDecoderN == 1) {
	Fatal(_("video/vaapi: out of decoders\n"));
    }

    if (!(decoder = calloc(1, sizeof(*decoder)))) {
	Fatal(_("video/vaapi: out of memory\n"));
    }
    decoder->VaDisplay = VaDisplay;
    decoder->Window = VideoWindow;
    decoder->VideoX = 0;
    decoder->VideoY = 0;
    decoder->VideoWidth = VideoWindowWidth;
    decoder->VideoHeight = VideoWindowHeight;

    VaapiInitSurfaceFlags(decoder);

    // mark everything "no resource allocated yet"
    decoder->DeintImages[0].image_id = VA_INVALID_ID;
    decoder->DeintImages[1].image_id = VA_INVALID_ID;
    decoder->DeintImages[2].image_id = VA_INVALID_ID;
    decoder->DeintImages[3].image_id = VA_INVALID_ID;
    decoder->DeintImages[4].image_id = VA_INVALID_ID;

    decoder->Image->image_id = VA_INVALID_ID;

    for (i = 0; i < CODEC_SURFACES_MAX; ++i) {
	decoder->SurfacesUsed[i] = VA_INVALID_ID;
	decoder->SurfacesFree[i] = VA_INVALID_ID;
    }

    // setup video surface ring buffer
    atomic_set(&decoder->SurfacesFilled, 0);
    for (i = 0; i < VIDEO_SURFACES_MAX; ++i) {
	decoder->SurfacesRb[i] = VA_INVALID_ID;
    }
#ifdef VA_EXP
    decoder->LastSurface = VA_INVALID_ID;
#endif
    decoder->BlackSurface = VA_INVALID_ID;

    //
    //	Setup ffmpeg vaapi context
    //
    decoder->Profile = VA_INVALID_ID;
    decoder->Entrypoint = VA_INVALID_ID;
    decoder->VaapiContext->display = VaDisplay;
    decoder->VaapiContext->config_id = VA_INVALID_ID;
    decoder->VaapiContext->context_id = VA_INVALID_ID;

#ifdef USE_GLX
    decoder->GlxSurfaces[0] = NULL;
    decoder->GlxSurfaces[1] = NULL;
    if (GlxEnabled) {
	// FIXME: create GLX context here
    }
#endif

    decoder->OutputWidth = VideoWindowWidth;
    decoder->OutputHeight = VideoWindowHeight;
    decoder->PixFmt = PIX_FMT_NONE;

    decoder->Stream = stream;
    if (!VaapiDecoderN) {		// FIXME: hack sync on audio
	decoder->SyncOnAudio = 1;
    }
    decoder->Closing = -300 - 1;
    decoder->PTS = AV_NOPTS_VALUE;

    // old va-api intel driver didn't supported get/put-image.
#if VA_CHECK_VERSION(0,33,99)
    // FIXME: not the exact version with support
    decoder->GetPutImage = 1;
#else
    decoder->GetPutImage = !VaapiBuggyIntel;
#endif

    VaapiDecoders[VaapiDecoderN++] = decoder;

    return decoder;
}

///
///	Cleanup VA-API.
///
///	Flushes the output ring buffer and destroys image, context and
///	config of the decoder; resets counters and sync state.
///
///	@param decoder	va-api hw decoder
///
static void VaapiCleanup(VaapiDecoder * decoder)
{
    int filled;
    VASurfaceID surface;
    int i;

    // flush output queue, only 1-2 frames buffered, no big loss
    while ((filled = atomic_read(&decoder->SurfacesFilled))) {
	decoder->SurfaceRead = (decoder->SurfaceRead + 1) % VIDEO_SURFACES_MAX;
	atomic_dec(&decoder->SurfacesFilled);

	surface = decoder->SurfacesRb[decoder->SurfaceRead];
	if (surface == VA_INVALID_ID) {
	    Error(_("video/vaapi: invalid surface in ringbuffer\n"));
	    continue;
	}
	// can crash and hang (therefore disabled with 0 &&)
	if (0 && vaSyncSurface(decoder->VaDisplay, surface)
	    != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: vaSyncSurface failed\n"));
	}
    }

#ifdef DEBUG
    if (decoder->SurfaceRead != decoder->SurfaceWrite) {
	abort();
    }
#endif

    // clear ring buffer
    for (i = 0; i < VIDEO_SURFACES_MAX; ++i) {
	decoder->SurfacesRb[i] = VA_INVALID_ID;
    }
#ifdef VA_EXP
    decoder->LastSurface = VA_INVALID_ID;
#endif

    decoder->WrongInterlacedWarned = 0;

    // cleanup image
    if (decoder->Image->image_id != VA_INVALID_ID) {
	if (vaDestroyImage(VaDisplay,
		decoder->Image->image_id) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't destroy image!\n"));
	}
	decoder->Image->image_id = VA_INVALID_ID;
    }
    // cleanup context and config
    if (decoder->VaapiContext) {
	if (decoder->VaapiContext->context_id != VA_INVALID_ID) {
	    if (vaDestroyContext(VaDisplay,
		    decoder->VaapiContext->context_id) != VA_STATUS_SUCCESS) {
		Error(_("video/vaapi: can't destroy context!\n"));
	    }
	    decoder->VaapiContext->context_id = VA_INVALID_ID;
	}

	if (decoder->VaapiContext->config_id != VA_INVALID_ID) {
	    if (vaDestroyConfig(VaDisplay,
		    decoder->VaapiContext->config_id) != VA_STATUS_SUCCESS) {
		Error(_("video/vaapi: can't destroy config!\n"));
	    }
	    decoder->VaapiContext->config_id = VA_INVALID_ID;
} } // cleanup surfaces if (decoder->SurfaceFreeN || decoder->SurfaceUsedN) { VaapiDestroySurfaces(decoder); } // cleanup images if (decoder->DeintImages[0].image_id != VA_INVALID_ID) { VaapiDestroyDeinterlaceImages(decoder); } decoder->SurfaceRead = 0; decoder->SurfaceWrite = 0; decoder->SurfaceField = 0; decoder->SyncCounter = 0; decoder->FrameCounter = 0; decoder->FramesDisplayed = 0; decoder->StartCounter = 0; decoder->Closing = 0; decoder->PTS = AV_NOPTS_VALUE; VideoDeltaPTS = 0; } /// /// Destroy a VA-API decoder. /// /// @param decoder VA-API decoder /// static void VaapiDelHwDecoder(VaapiDecoder * decoder) { int i; for (i = 0; i < VaapiDecoderN; ++i) { if (VaapiDecoders[i] == decoder) { VaapiDecoders[i] = NULL; VaapiDecoderN--; // FIXME: must copy last slot into empty slot and -- break; } } VaapiCleanup(decoder); if (decoder->BlackSurface != VA_INVALID_ID) { // // update OSD associate // if (VaOsdSubpicture != VA_INVALID_ID) { if (vaDeassociateSubpicture(VaDisplay, VaOsdSubpicture, &decoder->BlackSurface, 1) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't deassociate black surfaces\n")); } } if (vaDestroySurfaces(decoder->VaDisplay, &decoder->BlackSurface, 1) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't destroy a surface\n")); } } #ifdef USE_GLX if (decoder->GlxSurfaces[0]) { if (vaDestroySurfaceGLX(VaDisplay, decoder->GlxSurfaces[0]) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't destroy glx surface!\n")); } decoder->GlxSurfaces[0] = NULL; } if (decoder->GlxSurfaces[1]) { if (vaDestroySurfaceGLX(VaDisplay, decoder->GlxSurfaces[1]) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't destroy glx surface!\n")); } decoder->GlxSurfaces[0] = NULL; } if (decoder->GlTextures[0]) { glDeleteTextures(2, decoder->GlTextures); } #endif VaapiPrintFrames(decoder); free(decoder); } #ifdef DEBUG // currently unused, keep it for later static VAProfile VaapiFindProfile(const VAProfile * profiles, unsigned n, VAProfile profile); static VAEntrypoint 
VaapiFindEntrypoint(const VAEntrypoint * entrypoints, unsigned n,
    VAEntrypoint entrypoint);

///
///	1080i
///
///	Debug-only helper: builds a minimal H.264 decode pipeline, puts a
///	test surface as two fields and prints rough ms/frame timings to
///	stderr.  Not called from release builds.
///
static void Vaapi1080i(void)
{
    VAProfile profiles[vaMaxNumProfiles(VaDisplay)];
    int profile_n;
    VAEntrypoint entrypoints[vaMaxNumEntrypoints(VaDisplay)];
    int entrypoint_n;
    int p;
    int e;
    VAConfigAttrib attrib;
    VAConfigID config_id;
    VAContextID context_id;
    VASurfaceID surfaces[32];
    VAImage image[1];
    int n;
    uint32_t start_tick;
    uint32_t tick;

    p = -1;
    e = -1;

    // prepare va-api profiles
    if (vaQueryConfigProfiles(VaDisplay, profiles, &profile_n)) {
	Error(_("codec: vaQueryConfigProfiles failed"));
	return;
    }
    // check profile
    p = VaapiFindProfile(profiles, profile_n, VAProfileH264High);
    if (p == -1) {
	Debug(3, "\tno profile found\n");
	return;
    }
    // prepare va-api entry points
    if (vaQueryConfigEntrypoints(VaDisplay, p, entrypoints, &entrypoint_n)) {
	Error(_("codec: vaQueryConfigEntrypoints failed"));
	return;
    }
    e = VaapiFindEntrypoint(entrypoints, entrypoint_n, VAEntrypointVLD);
    if (e == -1) {
	Warning(_("codec: unsupported: slow path\n"));
	return;
    }

    memset(&attrib, 0, sizeof(attrib));
    attrib.type = VAConfigAttribRTFormat;
    attrib.value = VA_RT_FORMAT_YUV420;
    // create a configuration for the decode pipeline
    if (vaCreateConfig(VaDisplay, p, e, &attrib, 1, &config_id)) {
	Error(_("codec: can't create config"));
	return;
    }

    if (vaCreateSurfaces(VaDisplay, VA_RT_FORMAT_YUV420, 1920, 1080, surfaces,
	    32, NULL, 0) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't create surfaces\n"));
	return;
    }
    // bind surfaces to context
    if (vaCreateContext(VaDisplay, config_id, 1920, 1080, VA_PROGRESSIVE,
	    surfaces, 32, &context_id)) {
	Error(_("codec: can't create context"));
	return;
    }
#if 1
    // without this 1080i will crash
    image->image_id = VA_INVALID_ID;
    if (vaDeriveImage(VaDisplay, surfaces[0], image) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: vaDeriveImage failed\n"));
    }
    if (image->image_id != VA_INVALID_ID) {
	if (vaDestroyImage(VaDisplay, image->image_id) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't destroy image!\n"));
	}
    }
#else
    vaBeginPicture(VaDisplay, context_id, surfaces[0]);
    vaRenderPicture(VaDisplay, context_id, NULL, 0);
    // aborts without valid buffers upload
    vaEndPicture(VaDisplay, context_id);
#endif

    start_tick = GetMsTicks();
    // NOTE(review): loop runs only once (n from 1 to <2); the %10 progress
    // print below can never trigger — looks like a scaled-down benchmark.
    for (n = 1; n < 2; ++n) {
	if (vaPutSurface(VaDisplay, surfaces[0], VideoWindow,
		// decoder src
		0, 0, 1920, 1080,
		// video dst
		0, 0, 1920, 1080, NULL, 0,
		VA_TOP_FIELD | VA_CLEAR_DRAWABLE) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: vaPutSurface failed\n"));
	}
	if (vaPutSurface(VaDisplay, surfaces[0], VideoWindow,
		// decoder src
		0, 0, 1920, 1080,
		// video dst
		0, 0, 1920, 1080, NULL, 0,
		VA_BOTTOM_FIELD | VA_CLEAR_DRAWABLE) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: vaPutSurface failed\n"));
	}
	tick = GetMsTicks();
	if (!(n % 10)) {
	    fprintf(stderr, "%dms / frame\n", (tick - start_tick) / n);
	}
    }

    // destory the stuff.
    if (vaDestroyContext(VaDisplay, context_id) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't destroy context!\n"));
    }
    if (vaDestroySurfaces(VaDisplay, surfaces, 32) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't destroy surfaces\n"));
    }
    if (vaDestroyConfig(VaDisplay, config_id) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't destroy config!\n"));
    }
    fprintf(stderr, "done\n");
}

#endif

///
///	VA-API setup.
///
///	@param display_name	x11/xcb display name
///
///	@returns true if VA-API could be initialized, false otherwise.
/// static int VaapiInit(const char *display_name) { int major; int minor; VADisplayAttribute attr; const char *s; VaOsdImage.image_id = VA_INVALID_ID; VaOsdSubpicture = VA_INVALID_ID; #ifdef USE_GLX if (GlxEnabled) { // support glx VaDisplay = vaGetDisplayGLX(XlibDisplay); } else #endif { VaDisplay = vaGetDisplay(XlibDisplay); } if (!VaDisplay) { Error(_("video/vaapi: Can't connect VA-API to X11 server on '%s'\n"), display_name); return 0; } // XvBA needs this: setenv("DISPLAY", display_name, 1); if (vaInitialize(VaDisplay, &major, &minor) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: Can't inititialize VA-API on '%s'\n"), display_name); vaTerminate(VaDisplay); VaDisplay = NULL; return 0; } s = vaQueryVendorString(VaDisplay); Info(_("video/vaapi: libva %d.%d (%s) initialized\n"), major, minor, s); // // Setup fixes for driver bugs. // if (strstr(s, "VDPAU")) { Info(_("video/vaapi: use vdpau bug workaround\n")); setenv("VDPAU_VIDEO_PUTSURFACE_FAST", "0", 0); VaapiBuggyVdpau = 1; } if (strstr(s, "XvBA")) { VaapiBuggyXvBA = 1; } if (strstr(s, "Intel i965")) { VaapiBuggyIntel = 1; } // // check which attributes are supported // attr.type = VADisplayAttribBackgroundColor; attr.flags = VA_DISPLAY_ATTRIB_SETTABLE; if (vaGetDisplayAttributes(VaDisplay, &attr, 1) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: Can't get background-color attribute\n")); attr.value = 1; } Info(_("video/vaapi: background-color is %s\n"), attr.value ? 
_("supported") : _("unsupported")); // FIXME: VaapiSetBackground(VideoBackground); #if 0 // // check the chroma format // attr.type = VAConfigAttribRTFormat attr.flags = VA_DISPLAY_ATTRIB_GETTABLE; Vaapi1080i(); #endif #if VA_CHECK_VERSION(0,33,99) // // check vpp support // if (1) { VAEntrypoint entrypoints[vaMaxNumEntrypoints(VaDisplay)]; int entrypoint_n; int i; VaapiVideoProcessing = 0; if (!vaQueryConfigEntrypoints(VaDisplay, VAProfileNone, entrypoints, &entrypoint_n)) { for (i = 0; i < entrypoint_n; i++) { fprintf(stderr, "oops %d\n", i); if (entrypoints[i] == VAEntrypointVideoProc) { Info("video/vaapi: supports video processing\n"); VaapiVideoProcessing = 1; break; } } } } #endif return 1; } #ifdef USE_GLX /// /// VA-API GLX setup. /// /// @param display_name x11/xcb display name /// /// @returns true if VA-API could be initialized, false otherwise. /// static int VaapiGlxInit(const char *display_name) { GlxEnabled = 1; GlxInit(); if (GlxEnabled) { GlxSetupWindow(VideoWindow, VideoWindowWidth, VideoWindowHeight, GlxContext); } if (!GlxEnabled) { Error(_("video/glx: glx error\n")); } return VaapiInit(display_name); } #endif /// /// VA-API cleanup /// static void VaapiExit(void) { int i; // FIXME: more VA-API cleanups... for (i = 0; i < VaapiDecoderN; ++i) { if (VaapiDecoders[i]) { VaapiDelHwDecoder(VaapiDecoders[i]); VaapiDecoders[i] = NULL; } } VaapiDecoderN = 0; if (!VaDisplay) { vaTerminate(VaDisplay); VaDisplay = NULL; } } //---------------------------------------------------------------------------- /// /// Update output for new size or aspect ratio. 
/// /// @param decoder VA-API decoder /// static void VaapiUpdateOutput(VaapiDecoder * decoder) { VideoUpdateOutput(decoder->InputAspect, decoder->InputWidth, decoder->InputHeight, decoder->Resolution, decoder->VideoX, decoder->VideoY, decoder->VideoWidth, decoder->VideoHeight, &decoder->OutputX, &decoder->OutputY, &decoder->OutputWidth, &decoder->OutputHeight, &decoder->CropX, &decoder->CropY, &decoder->CropWidth, &decoder->CropHeight); #ifdef USE_AUTOCROP decoder->AutoCrop->State = 0; decoder->AutoCrop->Count = AutoCropDelay; #endif } /// /// Find VA-API image format. /// /// @param decoder VA-API decoder /// @param pix_fmt ffmpeg pixel format /// @param[out] format image format /// /// FIXME: can fallback from I420 to YV12, if not supported /// FIXME: must check if put/get with this format is supported (see intel) /// static int VaapiFindImageFormat(VaapiDecoder * decoder, enum PixelFormat pix_fmt, VAImageFormat * format) { VAImageFormat *imgfrmts; int imgfrmt_n; int i; unsigned fourcc; switch (pix_fmt) { // convert ffmpeg to VA-API // NV12, YV12, I420, BGRA // intel: I420 is native format for MPEG-2 decoded surfaces // intel: NV12 is native format for H.264 decoded surfaces case PIX_FMT_YUV420P: case PIX_FMT_YUVJ420P: // fourcc = VA_FOURCC_YV12; // YVU fourcc = VA_FOURCC('I', '4', '2', '0'); // YUV break; case PIX_FMT_NV12: fourcc = VA_FOURCC_NV12; break; default: Fatal(_("video/vaapi: unsupported pixel format %d\n"), pix_fmt); } imgfrmt_n = vaMaxNumImageFormats(decoder->VaDisplay); imgfrmts = alloca(imgfrmt_n * sizeof(*imgfrmts)); if (vaQueryImageFormats(decoder->VaDisplay, imgfrmts, &imgfrmt_n) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: vaQueryImageFormats failed\n")); return 0; } Debug(3, "video/vaapi: search format %c%c%c%c in %d image formats\n", fourcc, fourcc >> 8, fourcc >> 16, fourcc >> 24, imgfrmt_n); Debug(3, "video/vaapi: supported image formats:\n"); for (i = 0; i < imgfrmt_n; ++i) { Debug(3, "video/vaapi:\t%c%c%c%c\t%d\n", imgfrmts[i].fourcc, 
imgfrmts[i].fourcc >> 8, imgfrmts[i].fourcc >> 16,
	    imgfrmts[i].fourcc >> 24, imgfrmts[i].depth);
    }
    //
    //	search image format
    //
    for (i = 0; i < imgfrmt_n; ++i) {
	if (imgfrmts[i].fourcc == fourcc) {
	    *format = imgfrmts[i];
	    Debug(3, "video/vaapi: use\t%c%c%c%c\t%d\n", imgfrmts[i].fourcc,
		imgfrmts[i].fourcc >> 8, imgfrmts[i].fourcc >> 16,
		imgfrmts[i].fourcc >> 24, imgfrmts[i].depth);
	    return 1;
	}
    }

    Fatal("video/vaapi: pixel format %d unsupported by VA-API\n", pix_fmt);
    // FIXME: no fatal error!

    return 0;
}

///
///	Configure VA-API for new video format.
///
///	Creates the black surface, resets the previous context, allocates
///	the transfer image and decode surfaces for the new dimensions.
///
///	@param decoder	VA-API decoder
///
static void VaapiSetup(VaapiDecoder * decoder,
    const AVCodecContext * video_ctx)
{
    int width;
    int height;
    VAImageFormat format[1];

    // create initial black surface and display
    VaapiBlackSurface(decoder);
    // cleanup last context
    VaapiCleanup(decoder);

    width = video_ctx->width;
    height = video_ctx->height;
#ifdef DEBUG
    // FIXME: remove this if
    if (decoder->Image->image_id != VA_INVALID_ID) {
	abort();			// should be done by VaapiCleanup()
    }
#endif
    // FIXME: PixFmt not set!
    //VaapiFindImageFormat(decoder, decoder->PixFmt, format);
    VaapiFindImageFormat(decoder, PIX_FMT_NV12, format);

    // FIXME: this image is only needed for software decoder and auto-crop
    if (decoder->GetPutImage
	&& vaCreateImage(VaDisplay, format, width, height,
	    decoder->Image) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't create image!\n"));
    }
    Debug(3,
	"video/vaapi: created image %dx%d with id 0x%08x and buffer id 0x%08x\n",
	width, height, decoder->Image->image_id, decoder->Image->buf);

    // FIXME: interlaced not valid here?
decoder->Resolution =
	VideoResolutionGroup(width, height, decoder->Interlaced);
    VaapiCreateSurfaces(decoder, width, height);

#ifdef USE_GLX
    if (GlxEnabled) {
	// FIXME: destroy old context
	GlxSetupDecoder(decoder->InputWidth, decoder->InputHeight,
	    decoder->GlTextures);
	// FIXME: try two textures
	if (vaCreateSurfaceGLX(decoder->VaDisplay, GL_TEXTURE_2D,
		decoder->GlTextures[0], &decoder->GlxSurfaces[0])
	    != VA_STATUS_SUCCESS) {
	    Fatal(_("video/glx: can't create glx surfaces\n"));
	    // FIXME: no fatal here
	}
	/*
	   if (vaCreateSurfaceGLX(decoder->VaDisplay, GL_TEXTURE_2D,
	   decoder->GlTextures[1], &decoder->GlxSurfaces[1])
	   != VA_STATUS_SUCCESS) {
	   Fatal(_("video/glx: can't create glx surfaces\n"));
	   }
	 */
    }
#endif
    VaapiUpdateOutput(decoder);

    //
    //	update OSD associate
    //
#ifdef USE_GLX
    if (GlxEnabled) {
	// OSD is rendered through GLX, not as VA subpicture
	return;
    }
#endif
    VaapiAssociate(decoder);
}

///
///	Configure VA-API for new video format.
///
///	Queries the video-processing (vpp) filters the driver supports and
///	logs their capabilities; filter creation itself is still a stub.
///
///	@param decoder	VA-API decoder
///
static void VaapiSetupVideoProcessing(VaapiDecoder * decoder)
{
#if VA_CHECK_VERSION(0,33,99)
    VAProcFilterType filtertypes[VAProcFilterCount];
    unsigned filtertype_n;
    unsigned u;
    unsigned v;
    VAProcFilterCap denoise_caps[1];
    unsigned denoise_cap_n;
    VAProcFilterCapDeinterlacing deinterlacing_caps[VAProcDeinterlacingCount];
    unsigned deinterlacing_cap_n;
    VABufferID denoise_filter;
    VABufferID deint_filter;
    VABufferID sharpen_filter;
    VABufferID color_filter;
    VABufferID filters[VAProcFilterCount];
    unsigned filter_n;

    if (!VaapiVideoProcessing) {
	return;
    }
    //
    //	display and filter infos.
// filtertype_n = VAProcFilterCount; // API break this must be done vaQueryVideoProcFilters(VaDisplay, decoder->VaapiContext->context_id, filtertypes, &filtertype_n); for (u = 0; u < filtertype_n; ++u) { switch (filtertypes[u]) { case VAProcFilterNoiseReduction: Info("video/vaapi: noise reduction supported\n"); denoise_cap_n = 1; vaQueryVideoProcFilterCaps(VaDisplay, decoder->VaapiContext->context_id, VAProcFilterNoiseReduction, denoise_caps, &denoise_cap_n); if (denoise_cap_n) { Info("video/vaapi: %.2f - %.2f ++ %.2f = %.2f\n", denoise_caps->range.min_value, denoise_caps->range.max_value, denoise_caps->range.step, denoise_caps->range.default_value); } break; case VAProcFilterDeinterlacing: Info("video/vaapi: deinterlacing supported\n"); deinterlacing_cap_n = VAProcDeinterlacingCount; vaQueryVideoProcFilterCaps(VaDisplay, decoder->VaapiContext->context_id, VAProcFilterDeinterlacing, deinterlacing_caps, &deinterlacing_cap_n); for (v = 0; v < deinterlacing_cap_n; ++v) { switch (deinterlacing_caps[v].type) { case VAProcDeinterlacingBob: Info("video/vaapi: bob deinterlace supported\n"); break; case VAProcDeinterlacingWeave: Info("video/vaapi: weave deinterlace supported\n"); break; case VAProcDeinterlacingMotionAdaptive: Info("video/vaapi: motion adaptive deinterlace supported\n"); break; case VAProcDeinterlacingMotionCompensated: Info("video/vaapi: motion compensated deinterlace supported\n"); break; default: Info("video/vaapi: unsupported deinterlace #%02x\n", deinterlacing_caps[v].type); break; } } break; case VAProcFilterSharpening: Info("video/vaapi: sharpening supported\n"); break; case VAProcFilterColorBalance: Info("video/vaapi: color balance supported\n"); break; default: Info("video/vaapi: unsupported filter #%02x\n", filtertypes[u]); break; } } // // create pipeline filters // filter_n = 0; filtertype_n = VAProcFilterCount; vaQueryVideoProcFilters(VaDisplay, decoder->VaapiContext->context_id, filtertypes, &filtertype_n); for (u = 0; u < filtertype_n; ++u) { 
switch (filtertypes[u]) {
	    // all cases are stubs: filter creation not implemented yet
	    case VAProcFilterNoiseReduction:
		break;
	    case VAProcFilterDeinterlacing:
		break;
	    case VAProcFilterSharpening:
		break;
	    case VAProcFilterColorBalance:
		break;
	    default:
		break;
	}
    }

    //
    //	query pipeline caps
    //
#endif
}

///
///	Get a free surface.  Called from ffmpeg.
///
///	@param decoder	VA-API decoder
///	@param video_ctx	ffmpeg video codec context
///
///	@returns the oldest free surface
///
static VASurfaceID VaapiGetSurface(VaapiDecoder * decoder,
    const AVCodecContext * video_ctx)
{
#ifdef FFMPEG_BUG1_WORKAROUND
    // get_format not called with valid informations.
    // lazily (re)create config+context when the video size changes
    if (video_ctx->width != decoder->InputWidth
	|| video_ctx->height != decoder->InputHeight) {
	VAStatus status;

	decoder->InputWidth = video_ctx->width;
	decoder->InputHeight = video_ctx->height;
	decoder->InputAspect = video_ctx->sample_aspect_ratio;

	VaapiSetup(decoder, video_ctx);

	// create a configuration for the decode pipeline
	if ((status = vaCreateConfig(decoder->VaDisplay, decoder->Profile,
		    decoder->Entrypoint, NULL, 0,
		    &decoder->VaapiContext->config_id))) {
	    Error(_("video/vaapi: can't create config '%s'\n"),
		vaErrorStr(status));
	    // bind surfaces to context
	} else if ((status = vaCreateContext(decoder->VaDisplay,
		    decoder->VaapiContext->config_id, video_ctx->width,
		    video_ctx->height, VA_PROGRESSIVE, decoder->SurfacesFree,
		    decoder->SurfaceFreeN,
		    &decoder->VaapiContext->context_id))) {
	    Error(_("video/vaapi: can't create context '%s'\n"),
		vaErrorStr(status));
	}
	// FIXME: too late to switch to software rending on failures

	VaapiSetupVideoProcessing(decoder);
    }
#else
    (void)video_ctx;
#endif

    return VaapiGetSurface0(decoder);
}

///
///	Find VA-API profile.
///
///	Check if the requested profile is supported by VA-API.
///
///	@param profiles	a table of all supported profiles
///	@param n	number of supported profiles
///	@param profile	requested profile
///
///	@returns the profile if supported, -1 if unsupported.
/// static VAProfile VaapiFindProfile(const VAProfile * profiles, unsigned n, VAProfile profile) { unsigned u; for (u = 0; u < n; ++u) { if (profiles[u] == profile) { return profile; } } return -1; } /// /// Find VA-API entry point. /// /// Check if the requested entry point is supported by VA-API. /// /// @param entrypoints a table of all supported entrypoints /// @param n number of supported entrypoints /// @param entrypoint requested entrypoint /// /// @returns the entry point if supported, -1 if unsupported. /// static VAEntrypoint VaapiFindEntrypoint(const VAEntrypoint * entrypoints, unsigned n, VAEntrypoint entrypoint) { unsigned u; for (u = 0; u < n; ++u) { if (entrypoints[u] == entrypoint) { return entrypoint; } } return -1; } /// /// Callback to negotiate the PixelFormat. /// /// @param fmt is the list of formats which are supported by the codec, /// it is terminated by -1 as 0 is a valid format, the /// formats are ordered by quality. /// /// @note + 2 surface for software deinterlace /// static enum PixelFormat Vaapi_get_format(VaapiDecoder * decoder, AVCodecContext * video_ctx, const enum PixelFormat *fmt) { const enum PixelFormat *fmt_idx; VAProfile profiles[vaMaxNumProfiles(VaDisplay)]; int profile_n; VAEntrypoint entrypoints[vaMaxNumEntrypoints(VaDisplay)]; int entrypoint_n; int p; int e; VAConfigAttrib attrib; if (!VideoHardwareDecoder || (video_ctx->codec_id == AV_CODEC_ID_MPEG2VIDEO && VideoHardwareDecoder == 1) ) { // hardware disabled by config Debug(3, "codec: hardware acceleration disabled\n"); goto slow_path; } p = -1; e = -1; // prepare va-api profiles if (vaQueryConfigProfiles(VaDisplay, profiles, &profile_n)) { Error(_("codec: vaQueryConfigProfiles failed")); goto slow_path; } Debug(3, "codec: %d profiles\n", profile_n); // check profile switch (video_ctx->codec_id) { case AV_CODEC_ID_MPEG2VIDEO: decoder->SurfacesNeeded = CODEC_SURFACES_MPEG2 + VIDEO_SURFACES_MAX + 2; p = VaapiFindProfile(profiles, profile_n, VAProfileMPEG2Main); break; 
	case AV_CODEC_ID_MPEG4:
	case AV_CODEC_ID_H263:
	    decoder->SurfacesNeeded =
		CODEC_SURFACES_MPEG4 + VIDEO_SURFACES_MAX + 2;
	    p = VaapiFindProfile(profiles, profile_n,
		VAProfileMPEG4AdvancedSimple);
	    break;
	case AV_CODEC_ID_H264:
	    decoder->SurfacesNeeded =
		CODEC_SURFACES_H264 + VIDEO_SURFACES_MAX + 2;
	    // try more simple formats, fallback to better
	    if (video_ctx->profile == FF_PROFILE_H264_BASELINE) {
		p = VaapiFindProfile(profiles, profile_n,
		    VAProfileH264Baseline);
		if (p == -1) {
		    p = VaapiFindProfile(profiles, profile_n,
			VAProfileH264Main);
		}
	    } else if (video_ctx->profile == FF_PROFILE_H264_MAIN) {
		p = VaapiFindProfile(profiles, profile_n, VAProfileH264Main);
	    }
	    if (p == -1) {
		// last resort: High profile decodes the simpler ones too
		p = VaapiFindProfile(profiles, profile_n, VAProfileH264High);
	    }
	    break;
	case AV_CODEC_ID_WMV3:
	    decoder->SurfacesNeeded =
		CODEC_SURFACES_VC1 + VIDEO_SURFACES_MAX + 2;
	    p = VaapiFindProfile(profiles, profile_n, VAProfileVC1Main);
	    break;
	case AV_CODEC_ID_VC1:
	    decoder->SurfacesNeeded =
		CODEC_SURFACES_VC1 + VIDEO_SURFACES_MAX + 2;
	    p = VaapiFindProfile(profiles, profile_n, VAProfileVC1Advanced);
	    break;
	default:
	    goto slow_path;
    }
    if (p == -1) {
	Debug(3, "\tno profile found\n");
	goto slow_path;
    }
    Debug(3, "\tprofile %d\n", p);

    // prepare va-api entry points
    if (vaQueryConfigEntrypoints(VaDisplay, p, entrypoints, &entrypoint_n)) {
	Error(_("codec: vaQueryConfigEntrypoints failed"));
	goto slow_path;
    }
    Debug(3, "codec: %d entrypoints\n", entrypoint_n);
    // look through formats
    for (fmt_idx = fmt; *fmt_idx != PIX_FMT_NONE; fmt_idx++) {
	Debug(3, "\t%#010x %s\n", *fmt_idx, av_get_pix_fmt_name(*fmt_idx));
	// check supported pixel format with entry point
	switch (*fmt_idx) {
	    case PIX_FMT_VAAPI_VLD:
		e = VaapiFindEntrypoint(entrypoints, entrypoint_n,
		    VAEntrypointVLD);
		break;
	    case PIX_FMT_VAAPI_MOCO:
	    case PIX_FMT_VAAPI_IDCT:
		Debug(3, "codec: this VA-API pixel format is not supported\n");
		// fallthrough: MOCO/IDCT are skipped like unknown formats
	    default:
		continue;
	}
	if (e != -1) {
	    Debug(3, "\tentry point %d\n", e);
	    break;
	}
    }
    if (e == -1) {
	Warning(_("codec: unsupported: slow path\n"));
	goto slow_path;
    }
    //
    //	prepare decoder config
    //
    memset(&attrib, 0, sizeof(attrib));
    attrib.type = VAConfigAttribRTFormat;
    if (vaGetConfigAttributes(decoder->VaDisplay, p, e, &attrib, 1)) {
	Error(_("codec: can't get attributes"));
	goto slow_path;
    }
    if (attrib.value & VA_RT_FORMAT_YUV420) {
	Info(_("codec: YUV 420 supported\n"));
    }
    if (attrib.value & VA_RT_FORMAT_YUV422) {
	Info(_("codec: YUV 422 supported\n"));
    }
    if (attrib.value & VA_RT_FORMAT_YUV444) {
	Info(_("codec: YUV 444 supported\n"));
    }
    // only YUV 420 is actually used by the render path
    if (!(attrib.value & VA_RT_FORMAT_YUV420)) {
	Warning(_("codec: YUV 420 not supported\n"));
	goto slow_path;
    }

    decoder->Profile = p;
    decoder->Entrypoint = e;
    decoder->PixFmt = *fmt_idx;
    decoder->InputWidth = 0;
    decoder->InputHeight = 0;

#ifndef FFMPEG_BUG1_WORKAROUND
    if (video_ctx->width && video_ctx->height) {
	VAStatus status;

	decoder->InputWidth = video_ctx->width;
	decoder->InputHeight = video_ctx->height;
	decoder->InputAspect = video_ctx->sample_aspect_ratio;
	VaapiSetup(decoder, video_ctx);

	// FIXME: move the following into VaapiSetup

	// create a configuration for the decode pipeline
	if ((status = vaCreateConfig(decoder->VaDisplay, p, e, &attrib, 1,
		    &decoder->VaapiContext->config_id))) {
	    Error(_("codec: can't create config '%s'\n"), vaErrorStr(status));
	    goto slow_path;
	}
	// bind surfaces to context
	if ((status = vaCreateContext(decoder->VaDisplay,
		    decoder->VaapiContext->config_id, video_ctx->width,
		    video_ctx->height, VA_PROGRESSIVE, decoder->SurfacesFree,
		    decoder->SurfaceFreeN,
		    &decoder->VaapiContext->context_id))) {
	    Error(_("codec: can't create context '%s'\n"),
		vaErrorStr(status));
	    goto slow_path;
	}
	VaapiSetupVideoProcessing(decoder);
    }
#endif

    Debug(3, "\t%#010x %s\n", fmt_idx[0], av_get_pix_fmt_name(fmt_idx[0]));
    return *fmt_idx;

  slow_path:
    // no accelerated format found, fall back to software decode
    decoder->Profile = VA_INVALID_ID;
    decoder->Entrypoint = VA_INVALID_ID;
    decoder->VaapiContext->config_id = VA_INVALID_ID;
    decoder->SurfacesNeeded = VIDEO_SURFACES_MAX + 2;
    decoder->PixFmt = PIX_FMT_NONE;
    decoder->InputWidth = 0;
    decoder->InputHeight = 0;
    video_ctx->hwaccel_context = NULL;
    return avcodec_default_get_format(video_ctx, fmt);
}

///
///	Draw surface of the VA-API decoder with x11.
///
///	vaPutSurface with intel backend does sync on v-sync.
///
///	@param decoder	VA-API decoder
///	@param surface		VA-API surface id
///	@param interlaced	flag interlaced source
///	@param top_field_first	flag top_field_first for interlaced source
///	@param field	interlaced draw: 0 first field, 1 second field
///
static void VaapiPutSurfaceX11(VaapiDecoder * decoder, VASurfaceID surface,
    int interlaced, int top_field_first, int field)
{
    unsigned type;
    VAStatus status;
    uint32_t s;
    uint32_t e;

    // deinterlace: pick the field to display for hardware deinterlacers;
    // software bob/weave modes display the full frame instead
    if (interlaced
	&& VideoDeinterlace[decoder->Resolution] < VideoDeinterlaceSoftBob
	&& VideoDeinterlace[decoder->Resolution] != VideoDeinterlaceWeave) {
	if (top_field_first) {
	    if (field) {
		type = VA_BOTTOM_FIELD;
	    } else {
		type = VA_TOP_FIELD;
	    }
	} else {
	    if (field) {
		type = VA_TOP_FIELD;
	    } else {
		type = VA_BOTTOM_FIELD;
	    }
	}
    } else {
	type = VA_FRAME_PICTURE;
    }

    s = GetMsTicks();
    xcb_flush(Connection);
    if ((status = vaPutSurface(decoder->VaDisplay, surface, decoder->Window,
		// decoder src
		decoder->CropX, decoder->CropY, decoder->CropWidth,
		decoder->CropHeight,
		// video dst
		decoder->OutputX, decoder->OutputY, decoder->OutputWidth,
		decoder->OutputHeight, NULL, 0,
		type | decoder->SurfaceFlagsTable[decoder->Resolution]))
	!= VA_STATUS_SUCCESS) {
	// switching video kills VdpPresentationQueueBlockUntilSurfaceIdle
	Error(_("video/vaapi: vaPutSurface failed %d\n"), status);
    }
    // disabled: extra sync, kept for debugging
    if (0 && vaSyncSurface(decoder->VaDisplay, surface) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: vaSyncSurface failed\n"));
    }
    e = GetMsTicks();

    if (e - s > 2000) {
	Error(_("video/vaapi: gpu hung %dms %d\n"), e - s,
	    decoder->FrameCounter);
	fprintf(stderr, _("video/vaapi: gpu hung %dms %d\n"), e - s,
	    decoder->FrameCounter);
    }

    if (0) {
	// check if surface is really ready
	// VDPAU backend, says always ready
	VASurfaceStatus status;

	if (vaQuerySurfaceStatus(decoder->VaDisplay, surface, &status)
	    != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: vaQuerySurface failed\n"));
	    status = VASurfaceReady;
	}
	if (status != VASurfaceReady) {
	    Warning(_
		("video/vaapi: surface %#010x not ready: still displayed %d\n"),
		surface, status);
	    return;
	}
    }

    if (0) {
	int i;

	// look how the status changes the next 40ms
	for (i = 0; i < 40; ++i) {
	    VASurfaceStatus status;

	    if (vaQuerySurfaceStatus(VaDisplay, surface, &status)
		!= VA_STATUS_SUCCESS) {
		Error(_("video/vaapi: vaQuerySurface failed\n"));
	    }
	    Debug(3, "video/vaapi: %2d %d\n", i, status);
	    usleep(1 * 1000);
	}
    }
    usleep(1 * 1000);
}

#ifdef USE_GLX

///
///	Draw surface of the VA-API decoder with glx.
///
///	@param decoder	VA-API decoder
///	@param surface		VA-API surface id
///	@param interlaced	flag interlaced source
///	@param top_field_first	flag top_field_first for interlaced source
///	@param field	interlaced draw: 0 first field, 1 second field
///
static void VaapiPutSurfaceGLX(VaapiDecoder * decoder, VASurfaceID surface,
    int interlaced, int top_field_first, int field)
{
    unsigned type;

    //uint32_t start;
    //uint32_t copy;
    //uint32_t end;

    // deinterlace: same field selection as the x11 path
    if (interlaced
	&& VideoDeinterlace[decoder->Resolution] < VideoDeinterlaceSoftBob
	&& VideoDeinterlace[decoder->Resolution] != VideoDeinterlaceWeave) {
	if (top_field_first) {
	    if (field) {
		type = VA_BOTTOM_FIELD;
	    } else {
		type = VA_TOP_FIELD;
	    }
	} else {
	    if (field) {
		type = VA_TOP_FIELD;
	    } else {
		type = VA_BOTTOM_FIELD;
	    }
	}
    } else {
	type = VA_FRAME_PICTURE;
    }
    //start = GetMsTicks();
    if (vaCopySurfaceGLX(decoder->VaDisplay, decoder->GlxSurfaces[0], surface,
	    type | decoder->SurfaceFlagsTable[decoder->Resolution])
	!= VA_STATUS_SUCCESS) {
	Error(_("video/glx: vaCopySurfaceGLX failed\n"));
	return;
    }
    //copy = GetMsTicks();

    // hardware surfaces are always busy
    // FIXME: CropX, ...
    GlxRenderTexture(decoder->GlTextures[0], decoder->OutputX,
	decoder->OutputY, decoder->OutputWidth, decoder->OutputHeight);
    //end = GetMsTicks();

    //Debug(3, "video/vaapi/glx: %d copy %d render\n", copy - start, end - copy);
}

#endif

#ifdef USE_AUTOCROP

///
///	VA-API auto-crop support.
///
///	@param decoder	VA-API hw decoder
///
static void VaapiAutoCrop(VaapiDecoder * decoder)
{
    VASurfaceID surface;
    uint32_t width;
    uint32_t height;
    void *va_image_data;
    void *data[3];
    uint32_t pitches[3];
    int crop14;
    int crop16;
    int next_state;
    int i;

    width = decoder->InputWidth;
    height = decoder->InputHeight;

  again:
    if (decoder->GetPutImage && decoder->Image->image_id == VA_INVALID_ID) {
	VAImageFormat format[1];

	Debug(3, "video/vaapi: download image not available\n");

	// FIXME: PixFmt not set!
	//VaapiFindImageFormat(decoder, decoder->PixFmt, format);
	VaapiFindImageFormat(decoder, PIX_FMT_NV12, format);
	//VaapiFindImageFormat(decoder, PIX_FMT_YUV420P, format);
	if (vaCreateImage(VaDisplay, format, width, height,
		decoder->Image) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't create image!\n"));
	    return;
	}
    }
    // no problem to go back, we just wrote it
    // FIXME: we can pass the surface through.
    surface =
	decoder->SurfacesRb[(decoder->SurfaceWrite + VIDEO_SURFACES_MAX -
	    1) % VIDEO_SURFACES_MAX];

    //	Copy data from frame to image
    // vaDeriveImage is tried first (zero copy); on failure the code falls
    // back permanently to the vaGetImage path via GetPutImage = 1.
    if (!decoder->GetPutImage
	&& vaDeriveImage(decoder->VaDisplay, surface,
	    decoder->Image) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: vaDeriveImage failed\n"));
	decoder->GetPutImage = 1;
	goto again;
    }
    if (decoder->GetPutImage
	&& (i = vaGetImage(decoder->VaDisplay, surface, 0, 0,
		decoder->InputWidth, decoder->InputHeight,
		decoder->Image->image_id)) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't get auto-crop image %d\n"), i);
	printf(_("video/vaapi: can't get auto-crop image %d\n"), i);
	return;
    }
    if (vaMapBuffer(VaDisplay, decoder->Image->buf,
	    &va_image_data) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't map auto-crop image!\n"));
	return;
    }
    // convert vaapi to our frame format
    for (i = 0; (unsigned)i < decoder->Image->num_planes; ++i) {
	data[i] = va_image_data + decoder->Image->offsets[i];
	pitches[i] = decoder->Image->pitches[i];
    }

    AutoCropDetect(decoder->AutoCrop, width, height, data, pitches);

    if (vaUnmapBuffer(VaDisplay, decoder->Image->buf) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't unmap auto-crop image!\n"));
    }
    if (!decoder->GetPutImage) {
	// derived image must be destroyed, it still references the surface
	if (vaDestroyImage(VaDisplay,
		decoder->Image->image_id) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't destroy image!\n"));
	}
	decoder->Image->image_id = VA_INVALID_ID;
    }
    // FIXME: this a copy of vdpau, combine the two same things

    // ignore black frames
    if (decoder->AutoCrop->Y1 >= decoder->AutoCrop->Y2) {
	return;
    }

    // expected letterbox bar heights for 14:9 and 16:9 content in 4:3
    crop14 =
	(decoder->InputWidth * decoder->InputAspect.num * 9) /
	(decoder->InputAspect.den * 14);
    crop14 = (decoder->InputHeight - crop14) / 2;
    crop16 =
	(decoder->InputWidth * decoder->InputAspect.num * 9) /
	(decoder->InputAspect.den * 16);
    crop16 = (decoder->InputHeight - crop16) / 2;

    if (decoder->AutoCrop->Y1 >= crop16 - AutoCropTolerance
	&& decoder->InputHeight - decoder->AutoCrop->Y2 >=
	crop16 - AutoCropTolerance) {
	next_state = 16;
    } else if (decoder->AutoCrop->Y1 >= crop14 - AutoCropTolerance
	&& decoder->InputHeight - decoder->AutoCrop->Y2 >=
	crop14 - AutoCropTolerance) {
	next_state = 14;
    } else {
	next_state = 0;
    }

    if (decoder->AutoCrop->State == next_state) {
	return;
    }

    Debug(3, "video: crop aspect %d:%d %d/%d %+d%+d\n",
	decoder->InputAspect.num, decoder->InputAspect.den, crop14, crop16,
	decoder->AutoCrop->Y1, decoder->InputHeight - decoder->AutoCrop->Y2);

    Debug(3, "video: crop aspect %d -> %d\n", decoder->AutoCrop->State,
	next_state);

    // hysteresis: require the new aspect to persist before switching
    switch (decoder->AutoCrop->State) {
	case 16:
	case 14:
	    if (decoder->AutoCrop->Count++ < AutoCropDelay / 2) {
		return;
	    }
	    break;
	case 0:
	    if (decoder->AutoCrop->Count++ < AutoCropDelay) {
		return;
	    }
	    break;
    }

    decoder->AutoCrop->State = next_state;
    if (next_state) {
	decoder->CropX = VideoCutLeftRight[decoder->Resolution];
	decoder->CropY =
	    (next_state ==
	    16 ? crop16 : crop14) + VideoCutTopBottom[decoder->Resolution];
	decoder->CropWidth = decoder->InputWidth - decoder->CropX * 2;
	decoder->CropHeight = decoder->InputHeight - decoder->CropY * 2;

	// FIXME: this overwrites user choosen output position
	// FIXME: resize kills the auto crop values
	// FIXME: support other 4:3 zoom modes
	decoder->OutputX = decoder->VideoX;
	decoder->OutputY = decoder->VideoY;
	decoder->OutputWidth = (decoder->VideoHeight * next_state) / 9;
	decoder->OutputHeight = (decoder->VideoWidth * 9) / next_state;
	if (decoder->OutputWidth > decoder->VideoWidth) {
	    decoder->OutputWidth = decoder->VideoWidth;
	    decoder->OutputY =
		(decoder->VideoHeight - decoder->OutputHeight) / 2;
	} else if (decoder->OutputHeight > decoder->VideoHeight) {
	    decoder->OutputHeight = decoder->VideoHeight;
	    decoder->OutputX =
		(decoder->VideoWidth - decoder->OutputWidth) / 2;
	}
	Debug(3, "video: aspect output %dx%d %dx%d%+d%+d\n",
	    decoder->InputWidth, decoder->InputHeight, decoder->OutputWidth,
	    decoder->OutputHeight, decoder->OutputX, decoder->OutputY);
    } else {
	// sets AutoCrop->Count
	VaapiUpdateOutput(decoder);
    }
    decoder->AutoCrop->Count = 0;

    //
    //	update OSD associate
    //
    VaapiDeassociate(decoder);
    VaapiAssociate(decoder);
}

///
///	VA-API check if auto-crop todo.
///
///	@param decoder	VA-API hw decoder
///
///	@note a copy of VdpauCheckAutoCrop
///	@note auto-crop only supported with normal 4:3 display mode
///
static void VaapiCheckAutoCrop(VaapiDecoder * decoder)
{
    // reduce load, check only n frames
    if (Video4to3ZoomMode == VideoNormal && AutoCropInterval
	&& !(decoder->FrameCounter % AutoCropInterval)) {
	AVRational input_aspect_ratio;
	AVRational tmp_ratio;

	av_reduce(&input_aspect_ratio.num, &input_aspect_ratio.den,
	    decoder->InputWidth * decoder->InputAspect.num,
	    decoder->InputHeight * decoder->InputAspect.den, 1024 * 1024);

	tmp_ratio.num = 4;
	tmp_ratio.den = 3;
	// only 4:3 with 16:9/14:9 inside supported
	if (!av_cmp_q(input_aspect_ratio, tmp_ratio)) {
	    VaapiAutoCrop(decoder);
	} else {
	    decoder->AutoCrop->Count = 0;
	    decoder->AutoCrop->State = 0;
	}
    }
}

///
///	VA-API reset auto-crop.
///
static void VaapiResetAutoCrop(void)
{
    int i;

    for (i = 0; i < VaapiDecoderN; ++i) {
	VaapiDecoders[i]->AutoCrop->State = 0;
	VaapiDecoders[i]->AutoCrop->Count = 0;
    }
}

#endif

///
///	Queue output surface.
///
///	@param decoder	VA-API decoder
///	@param surface	output surface
///	@param softdec	software decoder
///
///	@note we can't mix software and hardware decoder surfaces
///
static void VaapiQueueSurface(VaapiDecoder * decoder, VASurfaceID surface,
    int softdec)
{
    VASurfaceID old;

    ++decoder->FrameCounter;

    if (1) {				// can't wait for output queue empty
	if (atomic_read(&decoder->SurfacesFilled) >= VIDEO_SURFACES_MAX) {
	    ++decoder->FramesDropped;
	    Warning(_("video: output buffer full, dropping frame (%d/%d)\n"),
		decoder->FramesDropped, decoder->FrameCounter);
	    if (!(decoder->FramesDisplayed % 300)) {
		VaapiPrintFrames(decoder);
	    }
	    if (softdec) {		// software surfaces only
		VaapiReleaseSurface(decoder, surface);
	    }
	    return;
	}
#if 0
    } else {				// wait for output queue empty
	while (atomic_read(&decoder->SurfacesFilled) >= VIDEO_SURFACES_MAX) {
	    VideoDisplayHandler();
	}
#endif
    }

    //
    //	    Check and release, old surface
    //
    if ((old = decoder->SurfacesRb[decoder->SurfaceWrite])
	!= VA_INVALID_ID) {
#if 0
	if (vaSyncSurface(decoder->VaDisplay, old) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: vaSyncSurface failed\n"));
	}
	VASurfaceStatus status;

	if (vaQuerySurfaceStatus(decoder->VaDisplay, old, &status)
	    != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: vaQuerySurface failed\n"));
	    status = VASurfaceReady;
	}
	if (status != VASurfaceReady) {
	    Warning(_
		("video/vaapi: surface %#010x not ready: still displayed %d\n"),
		old, status);
	    if (0
		&& vaSyncSurface(decoder->VaDisplay,
		    old) != VA_STATUS_SUCCESS) {
		Error(_("video/vaapi: vaSyncSurface failed\n"));
	    }
	}
#endif

	// now we can release the surface
	if (softdec) {			// software surfaces only
	    VaapiReleaseSurface(decoder, old);
	}
    }
#if 0
    // FIXME: intel seems to forget this, nvidia GT 210 has speed problems here
    if (VaapiBuggyIntel && VaOsdSubpicture != VA_INVALID_ID) {

	// FIXME: associate only if osd is displayed

	//
	//	associate the OSD with surface
	//
	if (VaapiUnscaledOsd) {
	    if (vaAssociateSubpicture(VaDisplay, VaOsdSubpicture, &surface, 1,
		    0, 0, VaOsdImage.width, VaOsdImage.height, 0, 0,
		    VideoWindowWidth, VideoWindowHeight,
		    VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD)
		!= VA_STATUS_SUCCESS) {
		Error(_("video/vaapi: can't associate subpicture\n"));
	    }
	} else {
	    // FIXME: auto-crop wrong position
	    if (vaAssociateSubpicture(VaDisplay, VaOsdSubpicture, &surface, 1,
		    0, 0, VaOsdImage.width, VaOsdImage.height, 0, 0,
		    decoder->InputWidth, decoder->InputHeight, 0)
		!= VA_STATUS_SUCCESS) {
		Error(_("video/vaapi: can't associate subpicture\n"));
	    }
	}
    }
#endif

    // publish surface in the ring buffer; filled counter is incremented
    // last so the display thread never sees an unwritten slot
    decoder->SurfacesRb[decoder->SurfaceWrite] = surface;
    decoder->SurfaceWrite = (decoder->SurfaceWrite + 1)
	% VIDEO_SURFACES_MAX;
    atomic_inc(&decoder->SurfacesFilled);

    Debug(4, "video/vaapi: yy video surface %#010x ready\n", surface);
}

///
///	Create and display a black empty surface.
///
///	@param decoder	VA-API decoder
///
static void VaapiBlackSurface(VaapiDecoder * decoder)
{
    VAStatus status;

#ifdef DEBUG
    uint32_t start;
#endif
    uint32_t sync;
    uint32_t put1;

#ifdef USE_GLX
    if (GlxEnabled) {			// already done
	return;
    }
#endif

    // wait until we have osd subpicture
    if (VaOsdSubpicture == VA_INVALID_ID) {
	Warning(_("video/vaapi: no osd subpicture yet\n"));
	return;
    }

    if (decoder->BlackSurface == VA_INVALID_ID) {
	uint8_t *va_image_data;
	unsigned u;

	status =
	    vaCreateSurfaces(decoder->VaDisplay, VA_RT_FORMAT_YUV420,
	    VideoWindowWidth, VideoWindowHeight, &decoder->BlackSurface, 1,
	    NULL, 0);
	if (status != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't create a surface: %s\n"),
		vaErrorStr(status));
	    return;
	}
	// full sized surface, no difference unscaled/scaled osd
	status =
	    vaAssociateSubpicture(decoder->VaDisplay, VaOsdSubpicture,
	    &decoder->BlackSurface, 1, 0, 0, VaOsdImage.width,
	    VaOsdImage.height, 0, 0, VideoWindowWidth, VideoWindowHeight, 0);
	if (status != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't associate subpicture: %s\n"),
		vaErrorStr(status));
	}
	Debug(3, "video/vaapi: associate %08x\n", decoder->BlackSurface);

	if (decoder->Image->image_id == VA_INVALID_ID) {
	    VAImageFormat format[1];

	    VaapiFindImageFormat(decoder, PIX_FMT_NV12, format);
	    status =
		vaCreateImage(VaDisplay, format, VideoWindowWidth,
		VideoWindowHeight, decoder->Image);
	    if (status != VA_STATUS_SUCCESS) {
		Error(_("video/vaapi: can't create image: %s\n"),
		    vaErrorStr(status));
		return;
	    }
	}

	status =
	    vaMapBuffer(VaDisplay, decoder->Image->buf,
	    (void **)&va_image_data);
	if (status != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't map the image: %s\n"),
		vaErrorStr(status));
	    return;
	}
	// NOTE(review): assumes NV12 layout — luma plane first, then
	// interleaved U/V; offsets[1] marks the start of the chroma plane
	for (u = 0; u < decoder->Image->data_size; ++u) {
	    if (u < decoder->Image->offsets[1]) {
		va_image_data[u] = 0x00;	// Y
	    } else if (u % 2 == 0) {
		va_image_data[u] = 0x80;	// U
	    } else {
#ifdef DEBUG
		// make black surface visible
		va_image_data[u] = 0xFF;	// V
#else
		va_image_data[u] = 0x80;	// V
#endif
	    }
	}

	if (vaUnmapBuffer(VaDisplay, decoder->Image->buf)
	    != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't unmap the image!\n"));
	}

	if (decoder->GetPutImage) {
	    status =
		vaPutImage(VaDisplay, decoder->BlackSurface,
		decoder->Image->image_id, 0, 0, VideoWindowWidth,
		VideoWindowHeight, 0, 0, VideoWindowWidth, VideoWindowHeight);
	    if (status != VA_STATUS_SUCCESS) {
		Error(_("video/vaapi: can't put image!\n"));
	    }
	} else {
	    // FIXME: PutImage isn't always supported
	    Debug(3,
		"video/vaapi: put image not supported, alternative path not written\n");
	}

#ifdef DEBUG
	start = GetMsTicks();
#endif
	if (vaSyncSurface(decoder->VaDisplay,
		decoder->BlackSurface) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: vaSyncSurface failed\n"));
	}
    } else {
#ifdef DEBUG
	start = GetMsTicks();
#endif
    }

    Debug(4, "video/vaapi: yy black video surface %#010x displayed\n",
	decoder->BlackSurface);
    sync = GetMsTicks();
    xcb_flush(Connection);
    if ((status = vaPutSurface(decoder->VaDisplay, decoder->BlackSurface,
		decoder->Window,
		// decoder src
		decoder->OutputX, decoder->OutputY, decoder->OutputWidth,
		decoder->OutputHeight,
		// video dst
		decoder->OutputX, decoder->OutputY, decoder->OutputWidth,
		decoder->OutputHeight, NULL, 0, VA_FRAME_PICTURE))
	!= VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: vaPutSurface failed %d\n"), status);
    }
    clock_gettime(CLOCK_MONOTONIC, &decoder->FrameTime);

    put1 = GetMsTicks();
    if (put1 - sync > 2000) {
	Error(_("video/vaapi: gpu hung %dms %d\n"), put1 - sync,
	    decoder->FrameCounter);
	fprintf(stderr, _("video/vaapi: gpu hung %dms %d\n"), put1 - sync,
	    decoder->FrameCounter);
    }
    Debug(4, "video/vaapi: sync %2u put1 %2u\n", sync - start, put1 - sync);

    if (0
	&& vaSyncSurface(decoder->VaDisplay,
	    decoder->BlackSurface) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: vaSyncSurface failed\n"));
    }

    usleep(1 * 1000);
}

#define noUSE_VECTOR			///< use gcc vector extension
#ifdef USE_VECTOR

typedef char v16qi __attribute__ ((vector_size(16)));
typedef char v8qi __attribute__ ((vector_size(8)));
typedef int16_t v4hi __attribute__ ((vector_size(4)));
typedef int16_t v8hi __attribute__ ((vector_size(8)));

///
///	ELA Edge-based Line Averaging
///	Low-Complexity Interpolation Method
///
///	abcdefg	abcdefg	abcdefg	abcdefg	abcdefg
///	   x       x       x       x       x
///	hijklmn	hijklmn	hijklmn	hijklmn	hijklmn
///
static void FilterLineSpatial(uint8_t * dst, const uint8_t * cur, int width,
    int above, int below, int next)
{
    int x;

    // 8/16 128bit xmm register
    for (x = 0; x < width; x += 8) {
	v8qi c;
	v8qi d;
	v8qi e;
	v8qi j;
	v8qi k;
	v8qi l;
	v8qi t1;
	v8qi t2;
	v8qi pred;
	v8qi score_l;
	v8qi score_h;
	v8qi t_l;
	v8qi t_h;
	v8qi zero;

	// ignore bound violation
	d = *(v8qi *) & cur[above + x];
	k = *(v8qi *) & cur[below + x];
	pred = __builtin_ia32_pavgb(d, k);

	// score = ABS(c - j) + ABS(d - k) + ABS(e - l);
	c = *(v8qi *) & cur[above + x - 1 * next];
	e = *(v8qi *) & cur[above + x + 1 * next];
	j = *(v8qi *) & cur[below + x - 1 * next];
	l = *(v8qi *) & cur[below + x + 1 * next];

	t1 = __builtin_ia32_psubusb(c, j);
	t2 = __builtin_ia32_psubusb(j, c);
	t1 = __builtin_ia32_pmaxub(t1, t2);
	zero ^= zero;
	score_l = __builtin_ia32_punpcklbw(t1, zero);
	score_h = __builtin_ia32_punpckhbw(t1, zero);

	t1 = __builtin_ia32_psubusb(d, k);
	t2 = __builtin_ia32_psubusb(k, d);
	t1 = __builtin_ia32_pmaxub(t1, t2);
	t_l = __builtin_ia32_punpcklbw(t1, zero);
	t_h = __builtin_ia32_punpckhbw(t1, zero);
	score_l = __builtin_ia32_paddw(score_l, t_l);
	score_h = __builtin_ia32_paddw(score_h, t_h);

	t1 = __builtin_ia32_psubusb(e, l);
	t2 = __builtin_ia32_psubusb(l, e);
	t1 = __builtin_ia32_pmaxub(t1, t2);
	t_l = __builtin_ia32_punpcklbw(t1, zero);
	t_h = __builtin_ia32_punpckhbw(t1, zero);
	score_l = __builtin_ia32_paddw(score_l, t_l);
	score_h = __builtin_ia32_paddw(score_h, t_h);

	// NOTE(review): only the vertical average is stored; the edge
	// scores computed above are not yet used in this variant.
	*(v8qi *) & dst[x] = pred;
    }
}

#else

    /// Return the absolute value of an integer.
#define ABS(i) ((i) >= 0 ? (i) : (-(i)))

///
///	ELA Edge-based Line Averaging
///	Low-Complexity Interpolation Method
///
///	abcdefg	abcdefg	abcdefg	abcdefg	abcdefg
///	   x       x       x       x       x
///	hijklmn	hijklmn	hijklmn	hijklmn	hijklmn
///
static void FilterLineSpatial(uint8_t * dst, const uint8_t * cur, int width,
    int above, int below, int next)
{
    int a, b, c, d, e, f, g, h, i, j, k, l, m, n;
    int spatial_pred;
    int spatial_score;
    int score;
    int x;

    for (x = 0; x < width; ++x) {
	a = cur[above + x - 3 * next];	// ignore bound violation
	b = cur[above + x - 2 * next];
	c = cur[above + x - 1 * next];
	d = cur[above + x + 0 * next];
	e = cur[above + x + 1 * next];
	f = cur[above + x + 2 * next];
	g = cur[above + x + 3 * next];

	h = cur[below + x - 3 * next];
	i = cur[below + x - 2 * next];
	j = cur[below + x - 1 * next];
	k = cur[below + x + 0 * next];
	l = cur[below + x + 1 * next];
	m = cur[below + x + 2 * next];
	n = cur[below + x + 3 * next];

	// start with vertical average, then try diagonals with lower score
	spatial_pred = (d + k) / 2;	// 0 pixel
	spatial_score = ABS(c - j) + ABS(d - k) + ABS(e - l);

	score = ABS(b - k) + ABS(c - l) + ABS(d - m);
	if (score < spatial_score) {
	    spatial_pred = (c + l) / 2;	// 1 pixel
	    spatial_score = score;
	    score = ABS(a - l) + ABS(b - m) + ABS(c - n);
	    if (score < spatial_score) {
		spatial_pred = (b + m) / 2;	// 2 pixel
		spatial_score = score;
	    }
	}
	score = ABS(d - i) + ABS(e - j) + ABS(f - k);
	if (score < spatial_score) {
	    spatial_pred = (e + j) / 2;	// -1 pixel
	    spatial_score = score;
	    score = ABS(e - h) + ABS(f - i) + ABS(g - j);
	    if (score < spatial_score) {
		spatial_pred = (f + i) / 2;	// -2 pixel
		spatial_score = score;
	    }
	}
	dst[x + 0] = spatial_pred;
    }
}

#endif

///
///	Vaapi spatial deinterlace.
///
///	@note FIXME: use common software deinterlace functions.
///
static void VaapiSpatial(VaapiDecoder * decoder, VAImage * src,
    VAImage * dst1, VAImage * dst2)
{
#ifdef DEBUG
    uint32_t tick1;
    uint32_t tick2;
    uint32_t tick3;
    uint32_t tick4;
    uint32_t tick5;
    uint32_t tick6;
    uint32_t tick7;
    uint32_t tick8;
#endif
    void *src_base;
    void *dst1_base;
    void *dst2_base;
    unsigned y;
    unsigned p;
    uint8_t *tmp;
    int pitch;
    int width;

#ifdef DEBUG
    tick1 = GetMsTicks();
#endif
    if (vaMapBuffer(decoder->VaDisplay, src->buf,
	    &src_base) != VA_STATUS_SUCCESS) {
	Fatal("video/vaapi: can't map the image!\n");
    }
#ifdef DEBUG
    tick2 = GetMsTicks();
#endif
    if (vaMapBuffer(decoder->VaDisplay, dst1->buf,
	    &dst1_base) != VA_STATUS_SUCCESS) {
	Fatal("video/vaapi: can't map the image!\n");
    }
#ifdef DEBUG
    tick3 = GetMsTicks();
#endif
    if (vaMapBuffer(decoder->VaDisplay, dst2->buf,
	    &dst2_base) != VA_STATUS_SUCCESS) {
	Fatal("video/vaapi: can't map the image!\n");
    }
#ifdef DEBUG
    tick4 = GetMsTicks();
#endif

    if (0) {				// test all updated
	memset(dst1_base, 0x00, dst1->data_size);
	memset(dst2_base, 0xFF, dst2->data_size);
    }
    // use tmp copy FIXME: only for intel needed
    // NOTE(review): malloc result is not checked — TODO verify policy
    tmp = malloc(src->data_size);
    memcpy(tmp, src_base, src->data_size);

    if (src->num_planes == 2) {		// NV12
	pitch = src->pitches[0];
	width = src->width;
	// split interlaced frame: even lines -> dst1, odd lines -> dst2,
	// missing lines synthesized by FilterLineSpatial (ELA)
	for (y = 0; y < (unsigned)src->height; y++) {	// Y
	    const uint8_t *cur;

	    cur = tmp + src->offsets[0] + y * pitch;
	    if (y & 1) {
		// copy to 2nd
		memcpy(dst2_base + src->offsets[0] + y * pitch, cur, width);
		// create 1st
		FilterLineSpatial(dst1_base + src->offsets[0] + y * pitch,
		    cur, width, y ? -pitch : pitch,
		    y + 1 < (unsigned)src->height ?
		    pitch : -pitch, 1);
	    } else {
		// copy to 1st
		memcpy(dst1_base + src->offsets[0] + y * pitch, cur, width);
		// create 2nd
		FilterLineSpatial(dst2_base + src->offsets[0] + y * pitch,
		    cur, width, y ? -pitch : pitch,
		    y + 1 < (unsigned)src->height ? pitch : -pitch, 1);
	    }
	}
	if (VideoSkipChromaDeinterlace[decoder->Resolution]) {
	    // chroma untouched: both fields get the same U/V lines
	    for (y = 0; y < (unsigned)src->height / 2; y++) {	// UV
		const uint8_t *cur;

		cur = tmp + src->offsets[1] + y * pitch;
		// copy to 1st
		memcpy(dst1_base + src->offsets[1] + y * pitch, cur, width);
		// copy to 2nd
		memcpy(dst2_base + src->offsets[1] + y * pitch, cur, width);
	    }
	} else {
	    // interleaved U/V, therefore pixel distance 2 for the filter
	    for (y = 0; y < (unsigned)src->height / 2; y++) {	// UV
		const uint8_t *cur;

		cur = tmp + src->offsets[1] + y * pitch;
		if (y & 1) {
		    // copy to 2nd
		    memcpy(dst2_base + src->offsets[1] + y * pitch, cur,
			width);
		    // create 1st
		    FilterLineSpatial(dst1_base + src->offsets[1] + y * pitch,
			cur, width, y ? -pitch : pitch,
			y + 1 < (unsigned)src->height / 2 ? pitch : -pitch,
			2);
		} else {
		    // copy to 1st
		    memcpy(dst1_base + src->offsets[1] + y * pitch, cur,
			width);
		    // create 2nd
		    FilterLineSpatial(dst2_base + src->offsets[1] + y * pitch,
			cur, width, y ? -pitch : pitch,
			y + 1 < (unsigned)src->height / 2 ? pitch : -pitch,
			2);
		}
	    }
	}
    } else {				// YV12 or I420
	for (p = 0; p < src->num_planes; ++p) {
	    pitch = src->pitches[p];
	    width = src->width >> (p != 0);
	    if (VideoSkipChromaDeinterlace[decoder->Resolution] && p) {
		for (y = 0; y < (unsigned)(src->height >> 1); y++) {
		    const uint8_t *cur;

		    cur = tmp + src->offsets[p] + y * pitch;
		    // copy to 1st
		    memcpy(dst1_base + src->offsets[p] + y * pitch, cur,
			width);
		    // copy to 2nd
		    memcpy(dst2_base + src->offsets[p] + y * pitch, cur,
			width);
		}
	    } else {
		for (y = 0; y < (unsigned)(src->height >> (p != 0)); y++) {
		    const uint8_t *cur;

		    cur = tmp + src->offsets[p] + y * pitch;
		    if (y & 1) {
			// copy to 2nd
			memcpy(dst2_base + src->offsets[p] + y * pitch, cur,
			    width);
			// create 1st
			FilterLineSpatial(dst1_base + src->offsets[p] +
			    y * pitch, cur, width, y ? -pitch : pitch,
			    y + 1 < (unsigned)(src->height >> (p != 0)) ?
			    pitch : -pitch, 1);
		    } else {
			// copy to 1st
			memcpy(dst1_base + src->offsets[p] + y * pitch, cur,
			    width);
			// create 2nd
			FilterLineSpatial(dst2_base + src->offsets[p] +
			    y * pitch, cur, width, y ? -pitch : pitch,
			    y + 1 < (unsigned)(src->height >> (p != 0)) ?
			    pitch : -pitch, 1);
		    }
		}
	    }
	}
    }
    free(tmp);

#ifdef DEBUG
    tick5 = GetMsTicks();
#endif
    if (vaUnmapBuffer(decoder->VaDisplay, dst2->buf) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't unmap image buffer\n"));
    }
#ifdef DEBUG
    tick6 = GetMsTicks();
#endif
    if (vaUnmapBuffer(decoder->VaDisplay, dst1->buf) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't unmap image buffer\n"));
    }
#ifdef DEBUG
    tick7 = GetMsTicks();
#endif
    if (vaUnmapBuffer(decoder->VaDisplay, src->buf) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't unmap image buffer\n"));
    }
#ifdef DEBUG
    tick8 = GetMsTicks();

    Debug(3, "video/vaapi: map=%2d/%2d/%2d deint=%2d umap=%2d/%2d/%2d\n",
	tick2 - tick1, tick3 - tick2, tick4 - tick3, tick5 - tick4,
	tick6 - tick5, tick7 - tick6, tick8 - tick7);
#endif
}

///
///	Vaapi bob deinterlace.
///
///	@note FIXME: use common software deinterlace functions.
/// static void VaapiBob(VaapiDecoder * decoder, VAImage * src, VAImage * dst1, VAImage * dst2) { #ifdef DEBUG uint32_t tick1; uint32_t tick2; uint32_t tick3; uint32_t tick4; uint32_t tick5; uint32_t tick6; uint32_t tick7; uint32_t tick8; #endif void *src_base; void *dst1_base; void *dst2_base; unsigned y; unsigned p; #ifdef DEBUG tick1 = GetMsTicks(); #endif if (vaMapBuffer(decoder->VaDisplay, src->buf, &src_base) != VA_STATUS_SUCCESS) { Fatal("video/vaapi: can't map the image!\n"); } #ifdef DEBUG tick2 = GetMsTicks(); #endif if (vaMapBuffer(decoder->VaDisplay, dst1->buf, &dst1_base) != VA_STATUS_SUCCESS) { Fatal("video/vaapi: can't map the image!\n"); } #ifdef DEBUG tick3 = GetMsTicks(); #endif if (vaMapBuffer(decoder->VaDisplay, dst2->buf, &dst2_base) != VA_STATUS_SUCCESS) { Fatal("video/vaapi: can't map the image!\n"); } #ifdef DEBUG tick4 = GetMsTicks(); #endif if (0) { // test all updated memset(dst1_base, 0x00, dst1->data_size); memset(dst2_base, 0xFF, dst2->data_size); return; } #if 0 // interleave for (p = 0; p < src->num_planes; ++p) { for (y = 0; y < (unsigned)(src->height >> (p != 0)); y += 2) { memcpy(dst1_base + src->offsets[p] + (y + 0) * src->pitches[p], src_base + src->offsets[p] + (y + 0) * src->pitches[p], src->pitches[p]); memcpy(dst1_base + src->offsets[p] + (y + 1) * src->pitches[p], src_base + src->offsets[p] + (y + 0) * src->pitches[p], src->pitches[p]); memcpy(dst2_base + src->offsets[p] + (y + 0) * src->pitches[p], src_base + src->offsets[p] + (y + 1) * src->pitches[p], src->pitches[p]); memcpy(dst2_base + src->offsets[p] + (y + 1) * src->pitches[p], src_base + src->offsets[p] + (y + 1) * src->pitches[p], src->pitches[p]); } } #endif #if 1 // use tmp copy if (1) { uint8_t *tmp; tmp = malloc(src->data_size); memcpy(tmp, src_base, src->data_size); for (p = 0; p < src->num_planes; ++p) { for (y = 0; y < (unsigned)(src->height >> (p != 0)); y += 2) { memcpy(dst1_base + src->offsets[p] + (y + 0) * src->pitches[p], tmp + src->offsets[p] + (y + 
0) * src->pitches[p], src->pitches[p]); memcpy(dst1_base + src->offsets[p] + (y + 1) * src->pitches[p], tmp + src->offsets[p] + (y + 0) * src->pitches[p], src->pitches[p]); memcpy(dst2_base + src->offsets[p] + (y + 0) * src->pitches[p], tmp + src->offsets[p] + (y + 1) * src->pitches[p], src->pitches[p]); memcpy(dst2_base + src->offsets[p] + (y + 1) * src->pitches[p], tmp + src->offsets[p] + (y + 1) * src->pitches[p], src->pitches[p]); } } free(tmp); } #endif #if 0 // use multiple tmp copy if (1) { uint8_t *tmp_src; uint8_t *tmp_dst1; uint8_t *tmp_dst2; tmp_src = malloc(src->data_size); memcpy(tmp_src, src_base, src->data_size); tmp_dst1 = malloc(src->data_size); tmp_dst2 = malloc(src->data_size); for (p = 0; p < src->num_planes; ++p) { for (y = 0; y < (unsigned)(src->height >> (p != 0)); y += 2) { memcpy(tmp_dst1 + src->offsets[p] + (y + 0) * src->pitches[p], tmp_src + src->offsets[p] + (y + 0) * src->pitches[p], src->pitches[p]); memcpy(tmp_dst1 + src->offsets[p] + (y + 1) * src->pitches[p], tmp_src + src->offsets[p] + (y + 0) * src->pitches[p], src->pitches[p]); memcpy(tmp_dst2 + src->offsets[p] + (y + 0) * src->pitches[p], tmp_src + src->offsets[p] + (y + 1) * src->pitches[p], src->pitches[p]); memcpy(tmp_dst2 + src->offsets[p] + (y + 1) * src->pitches[p], tmp_src + src->offsets[p] + (y + 1) * src->pitches[p], src->pitches[p]); } } memcpy(dst1_base, tmp_dst1, src->data_size); memcpy(dst2_base, tmp_dst2, src->data_size); free(tmp_src); free(tmp_dst1); free(tmp_dst2); } #endif #if 0 // dst1 first for (p = 0; p < src->num_planes; ++p) { for (y = 0; y < (unsigned)(src->height >> (p != 0)); y += 2) { memcpy(dst1_base + src->offsets[p] + (y + 0) * src->pitches[p], src_base + src->offsets[p] + (y + 0) * src->pitches[p], src->pitches[p]); memcpy(dst1_base + src->offsets[p] + (y + 1) * src->pitches[p], src_base + src->offsets[p] + (y + 0) * src->pitches[p], src->pitches[p]); } } // dst2 next for (p = 0; p < src->num_planes; ++p) { for (y = 0; y < (unsigned)(src->height 
>> (p != 0)); y += 2) { memcpy(dst2_base + src->offsets[p] + (y + 0) * src->pitches[p], src_base + src->offsets[p] + (y + 1) * src->pitches[p], src->pitches[p]); memcpy(dst2_base + src->offsets[p] + (y + 1) * src->pitches[p], src_base + src->offsets[p] + (y + 1) * src->pitches[p], src->pitches[p]); } } #endif #ifdef DEBUG tick5 = GetMsTicks(); #endif if (vaUnmapBuffer(decoder->VaDisplay, dst2->buf) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't unmap image buffer\n")); } #ifdef DEBUG tick6 = GetMsTicks(); #endif if (vaUnmapBuffer(decoder->VaDisplay, dst1->buf) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't unmap image buffer\n")); } #ifdef DEBUG tick7 = GetMsTicks(); #endif if (vaUnmapBuffer(decoder->VaDisplay, src->buf) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't unmap image buffer\n")); } #ifdef DEBUG tick8 = GetMsTicks(); Debug(4, "video/vaapi: map=%2d/%2d/%2d deint=%2d umap=%2d/%2d/%2d\n", tick2 - tick1, tick3 - tick2, tick4 - tick3, tick5 - tick4, tick6 - tick5, tick7 - tick6, tick8 - tick7); #endif } /// /// Create software deinterlace images. /// /// @param decoder VA-API decoder /// static void VaapiCreateDeinterlaceImages(VaapiDecoder * decoder) { VAImageFormat format[1]; int i; // NV12, YV12, I420, BGRA // NV12 Y U/V 2x2 // YV12 Y V U 2x2 // I420 Y U V 2x2 // Intel needs NV12 VaapiFindImageFormat(decoder, PIX_FMT_NV12, format); //VaapiFindImageFormat(decoder, PIX_FMT_YUV420P, format); for (i = 0; i < 5; ++i) { if (vaCreateImage(decoder->VaDisplay, format, decoder->InputWidth, decoder->InputHeight, decoder->DeintImages + i) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: can't create image!\n")); } } #ifdef DEBUG if (1) { VAImage *img; img = decoder->DeintImages; Debug(3, "video/vaapi: %c%c%c%c %dx%d*%d\n", img->format.fourcc, img->format.fourcc >> 8, img->format.fourcc >> 16, img->format.fourcc >> 24, img->width, img->height, img->num_planes); } #endif } /// /// Destroy software deinterlace images. 
///
/// @param decoder	VA-API decoder
///
static void VaapiDestroyDeinterlaceImages(VaapiDecoder * decoder)
{
    int i;

    // destroy all 5 images created by VaapiCreateDeinterlaceImages and
    // mark each slot invalid so they can be re-created later
    for (i = 0; i < 5; ++i) {
	if (vaDestroyImage(decoder->VaDisplay,
		decoder->DeintImages[i].image_id) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't destroy image!\n"));
	}
	decoder->DeintImages[i].image_id = VA_INVALID_ID;
    }
}

///
/// Vaapi software deinterlace.
///
/// Splits one interlaced hardware surface into two new field surfaces.
/// Variant for drivers where vaPutImage doesn't work: the surface
/// pixels are accessed directly through vaDeriveImage.
///
/// @param decoder	VA-API decoder
/// @param surface	interlaced hardware surface
///
static void VaapiCpuDerive(VaapiDecoder * decoder, VASurfaceID surface)
{
    //
    //	vaPutImage not working, vaDeriveImage
    //
#ifdef DEBUG
    uint32_t tick1;
    uint32_t tick2;
    uint32_t tick3;
    uint32_t tick4;
    uint32_t tick5;
#endif
    VAImage image[1];
    VAImage dest1[1];
    VAImage dest2[1];
    VAStatus status;
    VASurfaceID out1;
    VASurfaceID out2;

#ifdef DEBUG
    tick1 = GetMsTicks();
#endif
#if 0
    // get image test
    if (decoder->Image->image_id == VA_INVALID_ID) {
	VAImageFormat format[1];

	VaapiFindImageFormat(decoder, PIX_FMT_NV12, format);
	if (vaCreateImage(VaDisplay, format, decoder->InputWidth,
		decoder->InputHeight, decoder->Image) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't create image!\n"));
	}
    }
    if (vaGetImage(decoder->VaDisplay, surface, 0, 0, decoder->InputWidth,
	    decoder->InputHeight,
	    decoder->Image->image_id) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't get source image\n"));
	VaapiQueueSurface(decoder, surface, 0);
	VaapiQueueSurface(decoder, surface, 0);
	return;
    }
    *image = *decoder->Image;
#else
    // can't access the source pixels: show the interlaced frame twice
    if ((status =
	    vaDeriveImage(decoder->VaDisplay, surface,
		image)) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: vaDeriveImage failed %d\n"), status);
	VaapiQueueSurface(decoder, surface, 0);
	VaapiQueueSurface(decoder, surface, 0);
	return;
    }
#endif
#ifdef DEBUG
    tick2 = GetMsTicks();
#endif
    Debug(4, "video/vaapi: %c%c%c%c %dx%d*%d\n", image->format.fourcc,
	image->format.fourcc >> 8, image->format.fourcc >> 16,
	image->format.fourcc >> 24, image->width, image->height,
	image->num_planes);

    // get a free surfaces
    out1 = VaapiGetSurface0(decoder);
    if (out1 == VA_INVALID_ID) {
	abort();
    }
    // NOTE(review): a failed derive here is only logged; dest1 is then
    // used uninitialized by the deinterlacer below — confirm whether a
    // bail-out (as done for the source image) is needed
    if ((status =
	    vaDeriveImage(decoder->VaDisplay, out1,
		dest1)) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: vaDeriveImage failed %d\n"), status);
    }
#ifdef DEBUG
    tick3 = GetMsTicks();
#endif
    out2 = VaapiGetSurface0(decoder);
    if (out2 == VA_INVALID_ID) {
	abort();
    }
    // NOTE(review): same unhandled-failure pattern as dest1 above
    if ((status =
	    vaDeriveImage(decoder->VaDisplay, out2,
		dest2)) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: vaDeriveImage failed %d\n"), status);
    }
#ifdef DEBUG
    tick4 = GetMsTicks();
#endif
    // split the interlaced frame into the two field images
    switch (VideoDeinterlace[decoder->Resolution]) {
	case VideoDeinterlaceSoftBob:
	default:
	    VaapiBob(decoder, image, dest1, dest2);
	    break;
	case VideoDeinterlaceSoftSpatial:
	    VaapiSpatial(decoder, image, dest1, dest2);
	    break;
    }
#ifdef DEBUG
    tick5 = GetMsTicks();
#endif
#if 1
    if (vaDestroyImage(VaDisplay, image->image_id) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't destroy image!\n"));
    }
#endif
    if (vaDestroyImage(VaDisplay, dest1->image_id) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't destroy image!\n"));
    }
    if (vaDestroyImage(VaDisplay, dest2->image_id) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't destroy image!\n"));
    }
    // queue the two de-interlaced fields for display
    VaapiQueueSurface(decoder, out1, 1);
    VaapiQueueSurface(decoder, out2, 1);

#ifdef DEBUG
    // NOTE: tick5 is re-read here, so "deint" also includes the
    // destroy/queue time above
    tick5 = GetMsTicks();
    Debug(4, "video/vaapi: get=%2d get1=%2d get2=%d deint=%2d\n",
	tick2 - tick1, tick3 - tick2, tick4 - tick3, tick5 - tick4);
#endif
}

///
/// Vaapi software deinterlace.
///
/// Variant for drivers with a working vaPutImage: the interlaced frame
/// is downloaded with vaGetImage, split into two field images and both
/// are uploaded into free surfaces.
///
/// @param decoder	VA-API decoder
/// @param surface	interlaced hardware surface
///
static void VaapiCpuPut(VaapiDecoder * decoder, VASurfaceID surface)
{
    //
    //	vaPutImage working
    //
#ifdef DEBUG
    uint32_t tick1;
    uint32_t tick2;
    uint32_t tick3;
    uint32_t tick4;
    uint32_t tick5;
#endif
    VAImage *img1;
    VAImage *img2;
    VAImage *img3;
    VASurfaceID out;
    VAStatus status;

    //
    //	Create deinterlace images.
    //
    if (decoder->DeintImages[0].image_id == VA_INVALID_ID) {
	VaapiCreateDeinterlaceImages(decoder);
    }
    // disabled sync before the download
    if (0 && vaSyncSurface(decoder->VaDisplay, surface) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: vaSyncSurface failed\n"));
    }
    // img1 = interlaced source, img2/img3 = the two generated fields
    img1 = decoder->DeintImages;
    img2 = decoder->DeintImages + 1;
    img3 = decoder->DeintImages + 2;

#ifdef DEBUG
    tick1 = GetMsTicks();
#endif
    // on failure show the interlaced frame twice instead
    if (vaGetImage(decoder->VaDisplay, surface, 0, 0, decoder->InputWidth,
	    decoder->InputHeight, img1->image_id) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't get source image\n"));
	VaapiQueueSurface(decoder, surface, 0);
	VaapiQueueSurface(decoder, surface, 0);
	return;
    }
#ifdef DEBUG
    tick2 = GetMsTicks();
#endif

    // FIXME: handle top_field_first

    switch (VideoDeinterlace[decoder->Resolution]) {
	case VideoDeinterlaceSoftBob:
	default:
	    VaapiBob(decoder, img1, img2, img3);
	    break;
	case VideoDeinterlaceSoftSpatial:
	    VaapiSpatial(decoder, img1, img2, img3);
	    break;
    }
#ifdef DEBUG
    tick3 = GetMsTicks();
#endif

    // get a free surface and upload the image
    out = VaapiGetSurface0(decoder);
    if (out == VA_INVALID_ID) {
	abort();
    }
    // NOTE(review): this first upload aborts on failure while the second
    // one below only logs — confirm which behavior is intended
    if ((status =
	    vaPutImage(VaDisplay, out, img2->image_id, 0, 0, img2->width,
		img2->height, 0, 0, img2->width,
		img2->height)) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't put image: %d!\n"), status);
	abort();
    }
    VaapiQueueSurface(decoder, out, 1);
    if (0 && vaSyncSurface(decoder->VaDisplay, out) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: vaSyncSurface failed\n"));
    }
#ifdef DEBUG
    tick4 = GetMsTicks();
    Debug(4, "video/vaapi: deint %d %#010x -> %#010x\n",
	decoder->SurfaceField, surface, out);
#endif

    // get a free surface and upload the image
    out = VaapiGetSurface0(decoder);
    if (out == VA_INVALID_ID) {
	abort();
    }
    if (vaPutImage(VaDisplay, out, img3->image_id, 0, 0, img3->width,
	    img3->height, 0, 0, img3->width,
	    img3->height) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: can't put image!\n"));
    }
    VaapiQueueSurface(decoder, out, 1);
    if (0 && vaSyncSurface(decoder->VaDisplay, out) != VA_STATUS_SUCCESS) {
	Error(_("video/vaapi: vaSyncSurface failed\n"));
    }
#ifdef DEBUG
    tick5 = GetMsTicks();
    Debug(4, "video/vaapi: get=%2d deint=%2d put1=%2d put2=%2d\n",
	tick2 - tick1, tick3 - tick2, tick4 - tick3, tick5 - tick4);
#endif
}

///
/// Vaapi software deinterlace.
///
/// Dispatches to the vaPutImage or the vaDeriveImage based variant
/// depending on decoder->GetPutImage.
///
/// @param decoder	VA-API decoder
/// @param surface	interlaced hardware surface
///
static void VaapiCpuDeinterlace(VaapiDecoder * decoder, VASurfaceID surface)
{
    if (decoder->GetPutImage) {
	VaapiCpuPut(decoder, surface);
    } else {
	VaapiCpuDerive(decoder, surface);
    }
    // FIXME: must release software input surface
}

///
/// Render a ffmpeg frame
///
/// @param decoder	VA-API decoder
/// @param video_ctx	ffmpeg video codec context
/// @param frame	frame to display
///
static void VaapiRenderFrame(VaapiDecoder * decoder,
    const AVCodecContext * video_ctx, const AVFrame * frame)
{
    VASurfaceID surface;
    int interlaced;

    // FIXME: some tv-stations toggle interlace on/off
    // frame->interlaced_frame isn't always correct set
    // heuristic: 720 lines is always progressive, everything else is
    // treated as interlaced (warn once per decoder when overriding)
    interlaced = frame->interlaced_frame;
    if (video_ctx->height == 720) {
	if (interlaced && !decoder->WrongInterlacedWarned) {
	    Debug(3, "video/vaapi: wrong interlace flag fixed\n");
	    decoder->WrongInterlacedWarned = 1;
	}
	interlaced = 0;
    } else {
	if (!interlaced && !decoder->WrongInterlacedWarned) {
	    Debug(3, "video/vaapi: wrong interlace flag fixed\n");
	    decoder->WrongInterlacedWarned = 1;
	}
	interlaced = 1;
    }

    // FIXME: should be done by init video_ctx->field_order
    if (decoder->Interlaced != interlaced
	|| decoder->TopFieldFirst != frame->top_field_first) {
#if 0					// field_order only in git
	Debug(3, "video/vaapi: interlaced %d top-field-first %d - %d\n",
	    interlaced, frame->top_field_first, video_ctx->field_order);
#else
	Debug(3, "video/vaapi: interlaced %d top-field-first %d\n",
	    interlaced, frame->top_field_first);
#endif
	decoder->Interlaced = interlaced;
	decoder->TopFieldFirst = frame->top_field_first;
	decoder->SurfaceField = 0;
    }
    // update aspect ratio changes
#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53,60,100)
    // newer ffmpeg: per-frame aspect ratio
    if (decoder->InputWidth && decoder->InputHeight
	&& av_cmp_q(decoder->InputAspect, frame->sample_aspect_ratio)) {
	Debug(3, "video/vaapi: aspect ratio changed\n");

	decoder->InputAspect = frame->sample_aspect_ratio;
	VaapiUpdateOutput(decoder);
    }
#else
    if (decoder->InputWidth && decoder->InputHeight
	&& av_cmp_q(decoder->InputAspect, video_ctx->sample_aspect_ratio)) {
	Debug(3, "video/vaapi: aspect ratio changed\n");

	decoder->InputAspect = video_ctx->sample_aspect_ratio;
	VaapiUpdateOutput(decoder);
    }
#endif

    //
    //	Hardware render
    //
    if (video_ctx->hwaccel_context) {

	if (video_ctx->height != decoder->InputHeight
	    || video_ctx->width != decoder->InputWidth) {
	    Error(_("video/vaapi: stream <-> surface size mismatch\n"));
	    return;
	}
	// with hw acceleration the surface id is smuggled in data[3]
	surface = (unsigned)(size_t) frame->data[3];
	Debug(4, "video/vaapi: hw render hw surface %#010x\n", surface);

	if (interlaced
	    && VideoDeinterlace[decoder->Resolution] >=
	    VideoDeinterlaceSoftBob) {
	    VaapiCpuDeinterlace(decoder, surface);
	} else {
	    VaapiQueueSurface(decoder, surface, 0);
	}

	//
	// VAImage render
	//
    } else {
	void *va_image_data;
	int i;
	AVPicture picture[1];
	int width;
	int height;

	Debug(4, "video/vaapi: hw render sw surface\n");

	width = video_ctx->width;
	height = video_ctx->height;
	//
	//	Check image, format, size
	//
	if ((decoder->GetPutImage && decoder->Image->image_id == VA_INVALID_ID)
	    || decoder->PixFmt != video_ctx->pix_fmt
	    || width != decoder->InputWidth
	    || height != decoder->InputHeight) {

	    Debug(3,
		"video/vaapi: stream <-> surface size/interlace mismatch\n");

	    decoder->PixFmt = video_ctx->pix_fmt;
	    // FIXME: aspect done above!
	    decoder->InputWidth = width;
	    decoder->InputHeight = height;

	    VaapiSetup(decoder, video_ctx);
	}
	// FIXME: Need to insert software deinterlace here
	// FIXME: can/must insert auto-crop here (is done after upload)

	// get a free surface and upload the image
	surface = VaapiGetSurface0(decoder);
	Debug(4, "video/vaapi: video surface %#010x displayed\n", surface);

	// try the direct path first; on failure fall back permanently to
	// the vaPutImage path with an explicitly created image
	if (!decoder->GetPutImage
	    && vaDeriveImage(decoder->VaDisplay, surface,
		decoder->Image) != VA_STATUS_SUCCESS) {
	    VAImageFormat format[1];

	    Error(_("video/vaapi: vaDeriveImage failed\n"));

	    decoder->GetPutImage = 1;
	    VaapiFindImageFormat(decoder, decoder->PixFmt, format);
	    if (vaCreateImage(VaDisplay, format, width, height,
		    decoder->Image) != VA_STATUS_SUCCESS) {
		Error(_("video/vaapi: can't create image!\n"));
	    }
	}
	//
	//	Copy data from frame to image
	//
	if (vaMapBuffer(VaDisplay, decoder->Image->buf, &va_image_data)
	    != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't map the image!\n"));
	}
	// crazy: intel mixes YV12 and NV12 with mpeg
	if (decoder->Image->format.fourcc == VA_FOURCC_NV12) {
	    int x;

	    // intel NV12 convert YV12 to NV12

	    // copy Y
	    for (i = 0; i < height; ++i) {
		memcpy(va_image_data + decoder->Image->offsets[0]
		    + decoder->Image->pitches[0] * i,
		    frame->data[0] + frame->linesize[0] * i,
		    frame->linesize[0]);
	    }
	    // copy UV (interleave the separate U and V planes)
	    for (i = 0; i < height / 2; ++i) {
		for (x = 0; x < width / 2; ++x) {
		    ((uint8_t *) va_image_data)[decoder->Image->offsets[1]
			+ decoder->Image->pitches[1] * i + x * 2 + 0]
			= frame->data[1][i * frame->linesize[1] + x];
		    ((uint8_t *) va_image_data)[decoder->Image->offsets[1]
			+ decoder->Image->pitches[1] * i + x * 2 + 1]
			= frame->data[2][i * frame->linesize[2] + x];
		}
	    }
	    // vdpau uses this
	} else if (decoder->Image->format.fourcc ==
	    VA_FOURCC('I', '4', '2', '0')) {
	    // NOTE(review): data[1]/data[2] use offsets[1]/[2] in order
	    // but linesize[1]/[2] use pitches[2]/[1] — swapped relative
	    // to the num_planes==3 branch below; harmless if the chroma
	    // pitches are equal, but verify for I420
	    picture->data[0] = va_image_data + decoder->Image->offsets[0];
	    picture->linesize[0] = decoder->Image->pitches[0];
	    picture->data[1] = va_image_data + decoder->Image->offsets[1];
	    picture->linesize[1] = decoder->Image->pitches[2];
	    picture->data[2] = va_image_data + decoder->Image->offsets[2];
	    picture->linesize[2] = decoder->Image->pitches[1];
	    av_picture_copy(picture, (AVPicture *) frame,
		video_ctx->pix_fmt, width, height);
	} else if (decoder->Image->num_planes == 3) {
	    // generic 3-plane (YV12: U and V planes swapped)
	    picture->data[0] = va_image_data + decoder->Image->offsets[0];
	    picture->linesize[0] = decoder->Image->pitches[0];
	    picture->data[1] = va_image_data + decoder->Image->offsets[2];
	    picture->linesize[1] = decoder->Image->pitches[2];
	    picture->data[2] = va_image_data + decoder->Image->offsets[1];
	    picture->linesize[2] = decoder->Image->pitches[1];
	    av_picture_copy(picture, (AVPicture *) frame,
		video_ctx->pix_fmt, width, height);
	}

	if (vaUnmapBuffer(VaDisplay, decoder->Image->buf) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't unmap the image!\n"));
	}

	Debug(4, "video/vaapi: buffer %dx%d <- %dx%d\n",
	    decoder->Image->width, decoder->Image->height, width, height);

	// vaPutImage path: upload the filled image (i reused for status)
	if (decoder->GetPutImage
	    && (i = vaPutImage(VaDisplay, surface, decoder->Image->image_id,
		    0, 0, width, height, 0, 0, width,
		    height)) != VA_STATUS_SUCCESS) {
	    Error(_("video/vaapi: can't put image err:%d!\n"), i);
	}

	if (!decoder->GetPutImage) {
	    // derived image: must be destroyed after every frame
	    if (vaDestroyImage(VaDisplay,
		    decoder->Image->image_id) != VA_STATUS_SUCCESS) {
		Error(_("video/vaapi: can't destroy image!\n"));
	    }
	    decoder->Image->image_id = VA_INVALID_ID;
	}

	VaapiQueueSurface(decoder, surface, 1);
    }

    if (decoder->Interlaced) {
	++decoder->FrameCounter;
    }
}

///
/// Get hwaccel context for ffmpeg.
///
/// @param decoder	VA-API hw decoder
///
static void *VaapiGetHwAccelContext(VaapiDecoder * decoder)
{
    // the vaapi_context handed to ffmpeg's hw acceleration
    return decoder->VaapiContext;
}

///
/// Advance displayed frame of decoder.
/// /// @param decoder VA-API hw decoder /// static void VaapiAdvanceDecoderFrame(VaapiDecoder * decoder) { // next surface, if complete frame is displayed (1 -> 0) if (decoder->SurfaceField) { VASurfaceID surface; int filled; filled = atomic_read(&decoder->SurfacesFilled); // FIXME: this should check the caller // check decoder, if new surface is available if (filled <= 1) { // keep use of last surface ++decoder->FramesDuped; // FIXME: don't warn after stream start, don't warn during pause Error(_("video: display buffer empty, duping frame (%d/%d) %d\n"), decoder->FramesDuped, decoder->FrameCounter, VideoGetBuffers(decoder->Stream)); return; } // wait for rendering finished surface = decoder->SurfacesRb[decoder->SurfaceRead]; if (vaSyncSurface(decoder->VaDisplay, surface) != VA_STATUS_SUCCESS) { Error(_("video/vaapi: vaSyncSurface failed\n")); } decoder->SurfaceRead = (decoder->SurfaceRead + 1) % VIDEO_SURFACES_MAX; atomic_dec(&decoder->SurfacesFilled); // progressiv oder software deinterlacer decoder->SurfaceField = !decoder->Interlaced || VideoDeinterlace[decoder->Resolution] >= VideoDeinterlaceSoftBob; return; } decoder->SurfaceField = 1; } /// /// Display a video frame. 
///
/// @todo FIXME: add detection of missed frames
///
static void VaapiDisplayFrame(void)
{
    struct timespec nowtime;
#ifdef DEBUG
    uint32_t start;
    uint32_t put1;
    uint32_t put2;
#endif
    int i;
    VaapiDecoder *decoder;

    if (VideoSurfaceModesChanged) {	// handle changed modes
	VideoSurfaceModesChanged = 0;
	for (i = 0; i < VaapiDecoderN; ++i) {
	    VaapiInitSurfaceFlags(VaapiDecoders[i]);
	}
    }
    // check every decoder for a new surface to display
    for (i = 0; i < VaapiDecoderN; ++i) {
	VASurfaceID surface;
	int filled;

	decoder = VaapiDecoders[i];
	decoder->FramesDisplayed++;
	decoder->StartCounter++;
#ifdef VA_EXP
	// wait for display finished
	if (decoder->LastSurface != VA_INVALID_ID) {
	    if (vaSyncSurface(decoder->VaDisplay,
		    decoder->LastSurface) != VA_STATUS_SUCCESS) {
		Error(_("video/vaapi: vaSyncSurface failed\n"));
	    }
	}
#endif
	filled = atomic_read(&decoder->SurfacesFilled);
	// no surface available: show black with possible osd
	if (!filled) {
	    VaapiBlackSurface(decoder);
#ifdef VA_EXP
	    decoder->LastSurface = decoder->BlackSurface;
#endif
	    VaapiMessage(3, "video/vaapi: black surface displayed\n");
	    continue;
	}
	surface = decoder->SurfacesRb[decoder->SurfaceRead];
#ifdef VA_EXP
	decoder->LastSurface = surface;
#endif
#ifdef DEBUG
	if (surface == VA_INVALID_ID) {
	    printf(_("video/vaapi: invalid surface in ringbuffer\n"));
	}
	Debug(4, "video/vaapi: yy video surface %#010x displayed\n", surface);
	start = GetMsTicks();
#endif

	// VDPAU driver + INTEL driver does no v-sync with 1080
	// (workaround currently disabled by the leading 0 &&)
	if (0 && decoder->Interlaced
	    // FIXME: buggy libva-driver-vdpau, buggy libva-driver-intel
	    && (VaapiBuggyVdpau || (0 && VaapiBuggyIntel
		    && decoder->InputHeight == 1080))
	    && VideoDeinterlace[decoder->Resolution] !=
	    VideoDeinterlaceWeave) {
	    // put both fields in one go
	    VaapiPutSurfaceX11(decoder, surface, decoder->Interlaced,
		decoder->TopFieldFirst, 0);
#ifdef DEBUG
	    put1 = GetMsTicks();
#endif
	    VaapiPutSurfaceX11(decoder, surface, decoder->Interlaced,
		decoder->TopFieldFirst, 1);
#ifdef DEBUG
	    put2 = GetMsTicks();
#endif
	} else {
#ifdef USE_GLX
	    if (GlxEnabled) {
		VaapiPutSurfaceGLX(decoder, surface, decoder->Interlaced,
		    decoder->TopFieldFirst, decoder->SurfaceField);
	    } else
#endif
	    {
		VaapiPutSurfaceX11(decoder, surface, decoder->Interlaced,
		    decoder->TopFieldFirst, decoder->SurfaceField);
	    }
#ifdef DEBUG
	    put1 = GetMsTicks();
	    put2 = put1;
#endif
	}
	clock_gettime(CLOCK_MONOTONIC, &nowtime);
	// FIXME: 31 only correct for 50Hz
	// (Debug() and put1/put2/start compile away without DEBUG)
	if ((nowtime.tv_sec - decoder->FrameTime.tv_sec) * 1000 * 1000 * 1000 +
	    (nowtime.tv_nsec - decoder->FrameTime.tv_nsec) >
	    31 * 1000 * 1000) {
	    // FIXME: ignore still-frame, trick-speed
	    Debug(3, "video/vaapi: time/frame too long %ldms\n",
		((nowtime.tv_sec - decoder->FrameTime.tv_sec) * 1000 * 1000 *
		    1000 + (nowtime.tv_nsec -
			decoder->FrameTime.tv_nsec)) / (1000 * 1000));
	    Debug(4, "video/vaapi: put1 %2u put2 %2u\n", put1 - start,
		put2 - put1);
	}
#ifdef noDEBUG
	Debug(3, "video/vaapi: time/frame %ldms\n",
	    ((nowtime.tv_sec - decoder->FrameTime.tv_sec) * 1000 * 1000 * 1000
		+ (nowtime.tv_nsec -
		    decoder->FrameTime.tv_nsec)) / (1000 * 1000));
	if (put2 > start + 20) {
	    Debug(3, "video/vaapi: putsurface too long %ums\n", put2 - start);
	}
	Debug(4, "video/vaapi: put1 %2u put2 %2u\n", put1 - start,
	    put2 - put1);
#endif
	decoder->FrameTime = nowtime;
    }

#ifdef USE_GLX
    if (GlxEnabled) {
	//
	//	add OSD
	//
	if (OsdShown) {
	    GlxRenderTexture(OsdGlTextures[OsdIndex], 0, 0, VideoWindowWidth,
		VideoWindowHeight);
	    // FIXME: toggle osd
	}
	//glFinish();
	glXSwapBuffers(XlibDisplay, VideoWindow);
	GlxCheck();
	//glClearColor(1.0f, 0.0f, 0.0f, 0.0f);
	glClear(GL_COLOR_BUFFER_BIT);
    }
#endif
}

///
/// Set VA-API decoder video clock.
///
/// @param decoder	VA-API hardware decoder
/// @param pts	audio presentation timestamp
///
void VaapiSetClock(VaapiDecoder * decoder, int64_t pts)
{
    decoder->PTS = pts;
}

///
/// Get VA-API decoder video clock.
/// /// @param decoder VA-API decoder /// static int64_t VaapiGetClock(const VaapiDecoder * decoder) { // pts is the timestamp of the latest decoded frame if (decoder->PTS == (int64_t) AV_NOPTS_VALUE) { return AV_NOPTS_VALUE; } // subtract buffered decoded frames if (decoder->Interlaced) { return decoder->PTS - 20 * 90 * (2 * atomic_read(&decoder->SurfacesFilled) - decoder->SurfaceField); } return decoder->PTS - 20 * 90 * (atomic_read(&decoder->SurfacesFilled) + 2); } /// /// Set VA-API decoder closing stream flag. /// /// @param decoder VA-API decoder /// static void VaapiSetClosing(VaapiDecoder * decoder) { decoder->Closing = 1; } /// /// Reset start of frame counter. /// /// @param decoder VA-API decoder /// static void VaapiResetStart(VaapiDecoder * decoder) { decoder->StartCounter = 0; } /// /// Set trick play speed. /// /// @param decoder VA-API decoder /// @param speed trick speed (0 = normal) /// static void VaapiSetTrickSpeed(VaapiDecoder * decoder, int speed) { decoder->TrickSpeed = speed; decoder->TrickCounter = speed; if (speed) { decoder->Closing = 0; } } /// /// Get VA-API decoder statistics. /// /// @param decoder VA-API decoder /// @param[out] missed missed frames /// @param[out] duped duped frames /// @param[out] dropped dropped frames /// @param[out] count number of decoded frames /// void VaapiGetStats(VaapiDecoder * decoder, int *missed, int *duped, int *dropped, int *counter) { *missed = decoder->FramesMissed; *duped = decoder->FramesDuped; *dropped = decoder->FramesDropped; *counter = decoder->FrameCounter; } /// /// Sync decoder output to audio. /// /// trick-speed show frame times /// still-picture show frame until new frame arrives /// 60hz-mode repeat every 5th picture /// video>audio slow down video by duplicating frames /// video