PenguinTV-4.2.0/
PenguinTV-4.2.0/share/
PenguinTV-4.2.0/share/mozilla-planet.css
body {
	margin-left: 24px;
	margin-right: 24px;
	/* background-color: #fffe92; */
}
.heading {
	background-color: #f0f0ff;
	border-width: 1px;
	border-style: solid;
	padding: 12pt;
	margin: 12pt;
}
/* #nav_bar {
	border-width: 2px;
	border-style: solid;
} */
.feedtitle {
	font-size: x-large;
	font-weight: bold;
	text-align: center;
	padding: 12pt;
}
.entrynew {
	border-width: 3px;
	border-style: solid;
	border-color: #333;
	background-color: #f5f7fa;
	margin-top: 12pt;
	padding: 6pt;
	overflow: auto;
}
.entryold {
	border-width: 2px;
	border-style: solid;
	border-color: #777;
	background-color: #e0e3e8;
	margin-top: 12pt;
	padding: 6pt;
	overflow: auto;
}
.stitle {
	font-size: large;
	font-weight: bold;
	/* font-family: Sans-Serif; */
	padding-bottom: 20pt;
}
.sdate {
	font-size: 8pt;
	color: #444;
	font-style: italic;
}
.content {
	padding-left: 20pt;
	margin-top: 12pt;
}
.media {
	background-color: #ddd;
	border-color: #000000;
	border-width: 2px;
	border-style: solid;
	/* padding: 8pt; */
	margin: 8pt;
	overflow: auto;
}
.media a img {
	border: none;
}
dd {
	padding-left: 20pt;
}
q { font-style: italic; }
blockquote {
	display: block;
	color: #444444;
	background-color: #EEEEFF;
	border-color: #DDDDDD;
	border-width: 2px;
	border-style: solid;
	padding: 12pt;
	margin: 12pt;
}
.blockquote {
	display: block;
	color: #444444;
	background-color: #EEEEFF;
	border-color: #DDDDDD;
	border-width: 2px;
	border-style: solid;
	padding: 12pt;
	margin: 12pt;
}

PenguinTV-4.2.0/share/defaultsubs.opml
[OPML default-subscriptions list; the XML markup was lost in extraction and only the outline label "All" survives.]

PenguinTV-4.2.0/share/penguintv.glade.h
char *s = N_(" ");
char *s = N_(" megabytes");
char *s = N_(" minutes");
char *s = N_("(c)2007 Owen Williams");
char *s = N_("0 bytes");
char *s = N_("Add Feed");
char *s = N_("Add Search Tag");
char *s = N_("BitTorrent Options");
char *s = N_("Create Feed Filter");
char *s = N_("Feed Filter Properties");
char *s = N_("General Options");
char *s = N_("Login Required");
char *s = N_("Modify Search Tag");
char *s = N_("Options");
char *s = N_("Refresh Mode");
char *s = N_("Synchronization Preview");
char *s = N_("Synchronizing Media");
char *s = N_("Copying...");
char *s = N_("A username and password are required for this feed:");
char *s = N_("Add Feed");
char *s = N_("Add Search Tag");
char *s = N_("Add _Feed Filter...");
char *s = N_("All Feeds");
char *s = N_("Always show notification icon");
char *s = N_("Automatically download media");
char *s = N_("Automatically download new media");
char *s = N_("Autotune refresh period");
char *s = N_("Browse...");
char *s = N_("Clear Search");
char *s = N_("Convert newline characters to HTML paragraph breaks");
char *s = N_("Copy _audio files only");
char *s = N_("Create Feed Filter");
char *s = N_("Delete _files from the database after copying");
char *s = N_("Description");
char *s = N_("Destination");
char *s = N_("Download Unviewed Media");
char *s = N_("Download _Unviewed Media");
char *s = N_("E_ntry");
char *s = N_("Edit Favorite Tags");
char *s = N_("Edit Saved Searches");
char *s = N_("Edit Tags");
char *s = N_("Fee_d");
char *s = N_("Feed Filter Properties");
char *s = N_("Feed Properties");
char *s = N_("Filtered Feed Name:");
char *s = N_("Hide Viewed");
char *s = N_("Homepage");
char *s = N_("If unchecked, media may be deleted to free up 
space");
char *s = N_("Include this feed in search results");
char *s = N_("Last Poll");
char *s = N_("Limit total disk usage to ");
char *s = N_("Login Required");
char *s = N_("Mark As _Unviewed");
char *s = N_("Mark As _Viewed");
char *s = N_("Maximum port");
char *s = N_("Minimum port");
char *s = N_("Modify");
char *s = N_("Modify Search Tag");
char *s = N_("Never delete media automatically");
char *s = N_("Next Poll");
/* TRANSLATORS: Replace this string with your names, one name per line. */
char *s = N_("None yet");
char *s = N_("Options");
char *s = N_("Original Feed Name: ");
char *s = N_("Password: ");
char *s = N_("Pause Selected Downloads");
char *s = N_("PenguinTV");
char *s = N_("PenguinTV Sourceforge Site");
char *s = N_("Please enter the URL of the feed you would like to add, and any tags you would like to apply to this feed, separated by commas:");
char *s = N_("Please enter the name for the new filtered feed, and the search terms you'd like to filter for.");
char *s = N_("Please enter the search terms you'd like to save, and a name for this search.");
char *s = N_("Podcast and Video Blog aggregator for GTK+ and GNOME");
char *s = N_("Poll all feeds on startup");
char *s = N_("Preferences");
char *s = N_("RSS Feed");
char *s = N_("Re_fresh");
char *s = N_("Re_fresh Search Indexes");
char *s = N_("Re_name");
char *s = N_("Refresh Feeds");
char *s = N_("Refresh Feeds with _Errors");
char *s = N_("Refresh every ");
char *s = N_("Remove Feed");
char *s = N_("Resume");
char *s = N_("Resume Selected Downloads");
char *s = N_("Resume _All");
char *s = N_("Resume downloads on startup");
char *s = N_("Save Search...");
char *s = N_("Search Feeds and Entries");
char *s = N_("Search Terms:");
char *s = N_("Select a destination where all the downloaded PenguinTV media will be copied. If the destination is an mp3 player, make sure the mp3 player is connected before synchronizing.");
char *s = N_("Select a tag in the list on the left, and all the feeds with that tag will be marked on the right. You may mark and unmark feeds to add or remove that tag from them.\n"
"\n"
"Tagged feeds will appear at the top of the list.");
char *s = N_("Show _Notifications");
char *s = N_("Show notifications when this feed updates");
char *s = N_("Stop Selected Downloads");
char *s = N_("Synchronization Preview");
char *s = N_("Synchronize");
char *s = N_("Synchronizing");
char *s = N_("TEMPORARY ICON");
char *s = N_("Tag Editor");
char *s = N_("Tag Editor...");
char *s = N_("Tag Highlighter:");
char *s = N_("Tag name: ");
char *s = N_("Tags:");
char *s = N_("Tags: ");
char *s = N_("Title");
char *s = N_("URL: ");
char *s = N_("Upload rate limit");
char *s = N_("User Name: ");
char *s = N_("Using: ");
char *s = N_("Work around feeds where entries appear to be in one large paragraph");
char *s = N_("You can drag tags from the righthand side to the favorites list on the left. 
To remove a favorite, drag it from the lefthand side back to the right.");
char *s = N_("You may change the name of this search as well as the search terms.");
char *s = N_("You may click on the items in the list to edit tags.");
char *s = N_("_Add Feed...");
char *s = N_("_Delete All Media");
char *s = N_("_Delete Media");
char *s = N_("_Delete files on destination that are no longer in the database");
char *s = N_("_Download Media");
char *s = N_("_Export Subscriptions...");
char *s = N_("_Fancy Feed Display");
char *s = N_("_Favorite Tags...");
char *s = N_("_Go");
char *s = N_("_Horizontal Layout");
char *s = N_("_Import Subscriptions...");
char *s = N_("_Mark As Viewed");
char *s = N_("_Planet Style Layout");
char *s = N_("_Play Media");
char *s = N_("_Preview");
char *s = N_("_Properties");
char *s = N_("_Refresh Feeds");
char *s = N_("_Remove Feed");
char *s = N_("_Saved Searches...");
char *s = N_("_Show Today's Downloads...");
char *s = N_("_Standard Layout");
char *s = N_("_Synchronize");
char *s = N_("_Synchronize Media...");
char *s = N_("_Vertical Layout");
char *s = N_("_View");
char *s = N_("a");
char *s = N_("unknown");

PenguinTV-4.2.0/share/penguintvicon.png
[binary PNG image data omitted; application icon]
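The generated penguintv.glade.h above is never compiled into the program: Glade's "save translatable strings" output (and the intltool family of tools) emits such files purely so that xgettext can harvest the UI strings into the translation template, with N_() acting as a no-op extraction marker. A minimal sketch of the matching runtime side in Python follows; the "penguintv" text domain and the locale directory are illustrative assumptions, not read from this archive:

    # Runtime gettext setup sketch for a PyGTK app.  The "penguintv"
    # domain and the locale directory are illustrative assumptions.
    import gettext

    gettext.install("penguintv", "/usr/share/locale")  # binds _() as a builtin

    # N_() in the generated glade.h is only an extraction marker; at run
    # time the same string is looked up through _():
    print _("Add Feed")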
PenguinTV-4.2.0/share/penguintv.glade
[Glade XML UI definition; the markup was lost in extraction, leaving only stray attribute and label text. The file defines the complete GTK+ 2 interface: the Add Feed, About, Login Required, Add/Modify Search Tag, Edit Saved Searches, Create Feed Filter, Feed Properties, Feed Filter Properties, Synchronize, Synchronization Preview, Synchronizing, Edit Favorite Tags and Tag Editor dialogs; the download-manager pane; the standard, vertical, widescreen and planet layout windows; the main window with menu bar, toolbar and disk-usage status bar; and the Preferences dialog with its General Options, Refresh Mode, BitTorrent Options and Article Sync sections.]

PenguinTV-4.2.0/share/icons/
PenguinTV-4.2.0/share/icons/stock-go-down.svg
[SVG icon; XML data omitted]
PenguinTV-4.2.0/share/icons/stock-stop.svg
[SVG icon; XML data omitted]
PenguinTV-4.2.0/share/icons/stock-media-pause.svg
[SVG icon; XML data omitted]
PenguinTV-4.2.0/share/icons/stock-media-play.svg
[SVG icon; XML data omitted]
PenguinTV-4.2.0/share/icons/stock-preferences.svg
[SVG icon; XML data omitted]

PenguinTV-4.2.0/share/glade/
PenguinTV-4.2.0/share/glade/dialogs.glade
[Glade XML; markup lost in extraction. Maemo/Hildon variants of the About (contact owen-maemo@ywwg.com), Login Required, Feed Properties, download-manager, Edit Favorite Tags, Tag Editor and Preferences dialogs.]
PenguinTV-4.2.0/share/glade/vertical.glade
[Glade XML; markup lost in extraction. The vertical two-pane layout with its search and filter bar.]
PenguinTV-4.2.0/share/glade/widescreen.glade
[Glade XML; markup lost in extraction. The widescreen layout with side-by-side feed list and entry view.]
PenguinTV-4.2.0/share/glade/desktop.glade
[Glade XML; markup lost in extraction. The main desktop window: the File, Edit, View, Go, Feed, Entry and Help menus, the toolbar, and the disk-usage status bar.]
PenguinTV-4.2.0/share/glade/extra_dialogs.glade
[Glade XML; markup lost in extraction. The Add/Modify Search Tag, Edit Saved Searches, Create Feed Filter, Feed Filter Properties, Synchronize, Synchronization Preview and Synchronizing dialogs.]
PenguinTV-4.2.0/share/glade/standard.glade
[Glade XML; markup lost in extraction. The standard stacked layout with the feed list above the entry view.]
PenguinTV-4.2.0/share/glade/dialog_add_feed.glade
[Glade XML; markup lost in extraction. The Add Feed dialog with its Feed Properties and Options tabs.]
PenguinTV-4.2.0/share/glade/planet.glade
[Glade XML; markup lost in extraction. The planet-style single-pane layout.]
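The .glade files above are declarative libglade UI definitions: a PyGTK application loads them at run time and fetches widgets by name instead of constructing the interface in code. A minimal loading sketch follows, assuming only names that actually appear in the files above ("window1" is the root of the layout panes); the handler wiring is illustrative:

    # Sketch: loading one layout definition via libglade (PyGTK 2).
    import gtk
    import gtk.glade

    xml = gtk.glade.XML("share/glade/vertical.glade", "window1")
    window = xml.get_widget("window1")  # widgets are looked up by glade name
    window.connect("delete-event", lambda w, e: gtk.main_quit())
    window.show_all()
    gtk.main()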
PenguinTV-4.2.0/share/mozilla.css
/*body {
	margin-left: 24px;
	margin-right: 24px;
	// background-color: #fffe92;
}*/
.heading {
	background-color: #f0f0ff;
	border-width: 1px;
	border-style: solid;
	padding: 12pt;
	margin: 12pt;
}
/* #nav_bar {
	border-width: 2px;
	border-style: solid;
} */
.feedtitle {
	font-size: x-large;
	font-weight: bold;
	text-align: center;
	padding: 12pt;
}
/*.entry {
	border-width: 2px;
	border-style: solid;
	border-color: #777;
	background-color: #edf0f5;
	margin-top: 12pt;
	padding: 3px;
}*/
.stitle {
	font-size: large;
	font-weight: bold;
	/* font-family: Sans-Serif; */
	padding-bottom: 20pt;
}
.sdate {
	font-size: 8pt;
	color: #444;
	font-style: italic;
}
.content {
	padding-left: 20pt;
	margin-top: 12pt;
}
.media {
	background-color: #ddd;
	border-color: #000000;
	border-width: 2px;
	border-style: solid;
	margin: 8pt;
	overflow: auto;
}
.media a img {
	border: none;
}
dd {
	padding-left: 20pt;
}
q { font-style: italic; }
blockquote {
	display: block;
	color: #444444;
	background-color: #EEEEFF;
	border-color: #DDDDDD;
	border-width: 2px;
	border-style: solid;
	padding: 12pt;
	margin: 12pt;
}
.blockquote {
	display: block;
	color: #444444;
	background-color: #EEEEFF;
	border-color: #DDDDDD;
	border-width: 2px;
	border-style: solid;
	padding: 12pt;
	margin: 12pt;
}

PenguinTV-4.2.0/share/penguintv.service
[D-BUS Service]
Name=com.ywwg.PenguinTV
Exec=/usr/bin/PenguinTV

PenguinTV-4.2.0/share/mozilla-planet-hildon.css
body {
	margin-left: 2px;
	margin-right: 2px;
	font-size: large;
}
.heading {
	background-color: #f0f0ff;
	border-width: 1px;
	border-style: solid;
	padding: 6px;
	margin: 6px;
}
.feedtitle {
	font-size: xx-large;
	font-weight: bold;
	text-align: center;
	padding: 6px;
}
.entrynew {
	border-width: 3px;
	border-style: solid;
	border-color: #333;
	background-color: #f5f7fa;
	margin-top: 6px;
	padding: 3px;
	overflow: auto;
}
.entryold {
	border-width: 2px;
	border-style: solid;
	border-color: #777;
	background-color: #e0e3e8;
	margin-top: 6px;
	padding: 3px;
	overflow: auto;
}
.stitle {
	font-size: x-large;
	font-weight: bold;
	padding-bottom: 20px;
}
.sdate {
	font-size: 12pt;
	color: #444;
	font-style: italic;
}
.content {
	padding-left: 20px;
	margin-top: 12px;
}
.media {
	background-color: #ddd;
	border-color: #000000;
	border-width: 2px;
	border-style: solid;
	margin: 8px;
	overflow: auto;
}
.media a img {
	border: none;
}
dd {
	padding-left: 20px;
}
q { font-style: italic; }
blockquote {
	display: block;
	color: #444444;
	background-color: #EEEEFF;
	border-color: #DDDDDD;
	border-width: 2px;
	border-style: solid;
	padding: 12px;
	margin: 12px;
}
.blockquote {
	display: block;
	color: #444444;
	background-color: #EEEEFF;
	border-color: #DDDDDD;
	border-width: 2px;
	border-style: solid;
	padding: 12px;
	margin: 12px;
}

PenguinTV-4.2.0/share/gtkhtml.css
body {
	margin-left: 24px;
	margin-right: 24px;
	/* background-color: #fffe92; */
}
.heading {
	background-color: #f0f0ff;
	border-width: 1px;
	border-style: solid;
	padding: 12pt;
	margin: 12pt;
}
.feedtitle {
	font-size: x-large;
	font-weight: bold;
	text-align: center;
	padding: 12pt;
}
.entrynew {
	border-width: 3px;
	border-style: solid;
	border-color: #333;
	background-color: #f5f7fa;
	margin-top: 12pt;
	padding: 6pt;
}
.entryold {
	border-width: 2px;
	border-style: solid;
	border-color: #777;
	background-color: #e0e3e8;
	margin-top: 12pt;
	padding: 6pt;
}
.stitle {
	font-size: large;
	font-weight: bold;
	padding-bottom: 20pt;
}
.sdate {
	font-size: 8pt;
	color: #444;
	font-style: italic;
}
.content {
	padding-left: 20pt;
	margin-top: 12pt;
}
.media {
	background-color: #ddd;
	border-color: #000000;
	border-width: 2px;
	border-style: solid;
	/* padding: 8pt; */
	margin: 8pt;
	/* overflow: auto; */
}
.media a img {
	border: none;
}
dd {
	padding-left: 20pt;
}
q { font-style: italic; }
blockquote {
	display: block;
	color: #444444;
	background-color: #EEEEFF;
	border-color: #DDDDDD;
	border-width: 2px;
	border-style: solid;
	padding: 12pt;
	margin: 12pt;
}
.blockquote {
	display: block;
	color: #444444;
	background-color: #EEEEFF;
	border-color: #DDDDDD;
	border-width: 2px;
	border-style: solid;
	padding: 12pt;
	margin: 12pt;
}
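Taken together, the stylesheets above (the mozilla*.css files for the Mozilla embed, gtkhtml.css for the GtkHTML widget, and the -hildon variant for Maemo) style the rendered feed view by class: .feedtitle for the page heading, .entrynew and .entryold to set unread entries apart with a heavier border and lighter background, .media for enclosure boxes, and .stitle/.sdate/.content inside each entry. A sketch of the kind of markup a renderer would have to emit for those rules to apply follows; the HTML structure is inferred from the class names, not taken from PenguinTV's actual HTML generator:

    # Sketch: entry markup matching the stylesheet classes above.  The
    # structure is an assumption inferred from the CSS, not the real
    # PenguinTV renderer.
    def render_entry(title, date, body, viewed):
        css_class = "entryold" if viewed else "entrynew"
        return ('<div class="%s"><div class="stitle">%s</div>'
                '<div class="sdate">%s</div>'
                '<div class="content">%s</div></div>'
                % (css_class, title, date, body))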
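The penguintv.service file above registers the application for D-Bus activation: when any client asks the session bus for the name com.ywwg.PenguinTV, the bus daemon launches /usr/bin/PenguinTV if it is not already running. A client-side sketch with dbus-python follows; only the bus name comes from the .service file, while the object path and interface name are illustrative assumptions:

    # Sketch: D-Bus activation.  The bus name is from penguintv.service;
    # the object path and interface name are assumptions.
    import dbus

    bus = dbus.SessionBus()
    proxy = bus.get_object("com.ywwg.PenguinTV", "/PenguinTV")  # autostarts the app
    app = dbus.Interface(proxy, dbus_interface="com.ywwg.PenguinTV.App")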
PenguinTV-4.2.0/share/pixmaps/
PenguinTV-4.2.0/share/pixmaps/PenguinTV icon.png
[binary PNG image data omitted; same application icon as share/penguintvicon.png]
PenguinTV-4.2.0/share/pixmaps/ev_offline.png0000644000000000000000000000115310646750255015701 0ustar
[binary PNG image data omitted]
PenguinTV-4.2.0/share/pixmaps/40x40/0000755000000000000000000000000011450514774013634 5ustar
PenguinTV-4.2.0/share/pixmaps/40x40/penguintvicon.png0000644000000000000000000000421511057042025017220 0ustar
[binary PNG image data omitted]
PenguinTV-4.2.0/share/pixmaps/throbber.gif0000644000000000000000000002232310712345465015354 0ustar
[binary GIF animation data omitted]
".\@3uFQȅB/(84؋/GsN@(H$ Ϛ6ma6 |8Xe &P'*cE:PJBQDO."QNSI8tBw<tPMgP@FA nH7Dvf Z0ܻx݋)!,  E3 B!t;:G"x;=&4\)*D$@ X26Pn8 2(_K(4$ڵpʝKWn ;PenguinTV-4.2.0/share/pixmaps/ev_online.png0000644000000000000000000000102310646750255015537 0ustar PNG  IHDR w/bKGD pHYs&?tIME  QIDAT8˝ka KB[`?P "AoRz*؃bxTēDP+ iMKp&,< 3p87lCəSsYʋXNnr7]'N0E&Bޣt_ܸH$ @l0ι)NonQߨx˻ngI0H'u V1G8H`B`[kX?MV>}^R]DOoӝ {mվ|nYa:f!?uEym2+%%?4 ]GXOW xաL&qb#`w4EHUiz8IENDB`PenguinTV-4.2.0/share/penguintv.gladep0000644000000000000000000000036010646750255014572 0ustar PenguinTV penguintv PenguinTV-4.2.0/feedparser/0000755000000000000000000000000011450514774012412 5ustar PenguinTV-4.2.0/feedparser/README0000644000000000000000000000044710646750254013300 0ustar Universal Feed Parser Parse RSS and Atom feeds in Python. 2000 unit tests. Open source. Copyright 2002-4 by Mark Pilgrim Python-licensed ----- To install: $ python setup.py install Full documentation is available in the docs/ directory, or online at http://feedparser.org/docs/ PenguinTV-4.2.0/feedparser/setup.py0000755000000000000000000000334310646750254014133 0ustar from distutils.core import setup # patch distutils if it can't cope with the "classifiers" or "download_url" # keywords (prior to python 2.3.0). from distutils.dist import DistributionMetadata if not hasattr(DistributionMetadata, 'classifiers'): DistributionMetadata.classifiers = None if not hasattr(DistributionMetadata, 'download_url'): DistributionMetadata.download_url = None setup( name = 'feedparser', version = '3.3', description = 'Universal feed parser, handles RSS 0.9x, RSS 1.0, ' 'RSS 2.0, CDF, Atom feeds', long_description = """\ Universal feed parser --------------------- Handles RSS 0.9x, RSS 1.0, RSS 2.0, CDF, Atom feeds Required: Python 2.1 or later Recommended: Python 2.3 or later Recommended: libxml2 """, author='Mark Pilgrim', author_email = 'mark@diveintomark.org', url = 'http://feedparser.org/', download_url = 'http://sourceforge.net/projects/feedparser/', license = "Python", platforms = ['POSIX', 'Windows'], keywords = ['feed parser', 'feeds', 'rss', 'atom', 'cdf'], classifiers = [ "Development Status :: 5 - Production/Stable", "Environment :: Other Environment", "Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: Python Software Foundation License", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Communications", "Topic :: Internet :: WWW/HTTP", "Topic :: Other/Nonlisted Topic", "Topic :: Software Development :: Libraries :: Python Modules", ], py_modules = ['feedparser',] ) PenguinTV-4.2.0/feedparser/LICENSE0000644000000000000000000002734110646750254013427 0ustar Universal Feed Parser is released under the same license as Python 2.1.1. This is the official license for the Python 2.1.1 release: A. HISTORY OF THE SOFTWARE ========================== Python was created in the early 1990s by Guido van Rossum at Stichting Mathematisch Centrum (CWI) in the Netherlands as a successor of a language called ABC. Guido is Python's principal author, although it includes many contributions from others. The last version released from CWI was Python 1.2. In 1995, Guido continued his work on Python at the Corporation for National Research Initiatives (CNRI) in Reston, Virginia where he released several versions of the software. Python 1.6 was the last of the versions released by CNRI. In 2000, Guido and the Python core development team moved to BeOpen.com to form the BeOpen PythonLabs team. Python 2.0 was the first and only release from BeOpen.com. 
Following the release of Python 1.6, and after Guido van Rossum left CNRI to work with commercial software developers, it became clear that the ability to use Python with software available under the GNU Public License (GPL) was very desirable. CNRI and the Free Software Foundation (FSF) interacted to develop enabling wording changes to the Python license. Python 1.6.1 is essentially the same as Python 1.6, with a few minor bug fixes, and with a different license that enables later versions to be GPL-compatible. Python 2.1 is a derivative work of Python 1.6.1, as well as of Python 2.0. After Python 2.0 was released by BeOpen.com, Guido van Rossum and the other PythonLabs developers joined Digital Creations. All intellectual property added from this point on, starting with Python 2.1 and its alpha and beta releases, is owned by the Python Software Foundation (PSF), a non-profit modeled after the Apache Software Foundation. See http://www.python.org/psf/ for more information about the PSF. Thanks to the many outside volunteers who have worked under Guido's direction to make these releases possible. B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON =============================================================== PSF LICENSE AGREEMENT --------------------- 1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and the Individual or Organization ("Licensee") accessing and otherwise using Python 2.1.1 software in source or binary form and its associated documentation. 2. Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python 2.1.1 alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, i.e., "Copyright (c) 2001 Python Software Foundation; All Rights Reserved" are retained in Python 2.1.1 alone or in any derivative version prepared by Licensee. 3. In the event Licensee prepares a derivative work that is based on or incorporates Python 2.1.1 or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in any such work a brief summary of the changes made to Python 2.1.1. 4. PSF is making Python 2.1.1 available to Licensee on an "AS IS" basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 2.1.1 WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON 2.1.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 2.1.1, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. 6. This License Agreement will automatically terminate upon a material breach of its terms and conditions. 7. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between PSF and Licensee. This License Agreement does not grant permission to use PSF trademarks or trade name in a trademark sense to endorse or promote products or services of Licensee, or any third party. 8. 
By copying, installing or otherwise using Python 2.1.1, Licensee agrees to be bound by the terms and conditions of this License Agreement. BEOPEN.COM TERMS AND CONDITIONS FOR PYTHON 2.0 ---------------------------------------------- BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the Individual or Organization ("Licensee") accessing and otherwise using this software in source or binary form and its associated documentation ("the Software"). 2. Subject to the terms and conditions of this BeOpen Python License Agreement, BeOpen hereby grants Licensee a non-exclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use the Software alone or in any derivative version, provided, however, that the BeOpen Python License is retained in the Software, alone or in any derivative version prepared by Licensee. 3. BeOpen is making the Software available to Licensee on an "AS IS" basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. 4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. 5. This License Agreement will automatically terminate upon a material breach of its terms and conditions. 6. This License Agreement shall be governed by and interpreted in all respects by the law of the State of California, excluding conflict of law provisions. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between BeOpen and Licensee. This License Agreement does not grant permission to use BeOpen trademarks or trade names in a trademark sense to endorse or promote products or services of Licensee, or any third party. As an exception, the "BeOpen Python" logos available at http://www.pythonlabs.com/logos.html may be used according to the permissions granted on that web page. 7. By copying, installing or otherwise using the software, Licensee agrees to be bound by the terms and conditions of this License Agreement. CNRI OPEN SOURCE GPL-COMPATIBLE LICENSE AGREEMENT ------------------------------------------------- 1. This LICENSE AGREEMENT is between the Corporation for National Research Initiatives, having an office at 1895 Preston White Drive, Reston, VA 20191 ("CNRI"), and the Individual or Organization ("Licensee") accessing and otherwise using Python 1.6.1 software in source or binary form and its associated documentation. 2. 
Subject to the terms and conditions of this License Agreement, CNRI hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python 1.6.1 alone or in any derivative version, provided, however, that CNRI's License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) 1995-2001 Corporation for National Research Initiatives; All Rights Reserved" are retained in Python 1.6.1 alone or in any derivative version prepared by Licensee. Alternately, in lieu of CNRI's License Agreement, Licensee may substitute the following text (omitting the quotes): "Python 1.6.1 is made available subject to the terms and conditions in CNRI's License Agreement. This Agreement together with Python 1.6.1 may be located on the Internet using the following unique, persistent identifier (known as a handle): 1895.22/1013. This Agreement may also be obtained from a proxy server on the Internet using the following URL: http://hdl.handle.net/1895.22/1013". 3. In the event Licensee prepares a derivative work that is based on or incorporates Python 1.6.1 or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in any such work a brief summary of the changes made to Python 1.6.1. 4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. 5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON 1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. 6. This License Agreement will automatically terminate upon a material breach of its terms and conditions. 7. This License Agreement shall be governed by the federal intellectual property law of the United States, including without limitation the federal copyright law, and, to the extent such U.S. federal law does not apply, by the law of the Commonwealth of Virginia, excluding Virginia's conflict of law provisions. Notwithstanding the foregoing, with regard to derivative works based on Python 1.6.1 that incorporate non-separable material that was previously distributed under the GNU General Public License (GPL), the law of the Commonwealth of Virginia shall govern this License Agreement only as to issues arising under or with respect to Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between CNRI and Licensee. This License Agreement does not grant permission to use CNRI trademarks or trade name in a trademark sense to endorse or promote products or services of Licensee, or any third party. 8. By clicking on the "ACCEPT" button where indicated, or by copying, installing or otherwise using Python 1.6.1, Licensee agrees to be bound by the terms and conditions of this License Agreement. ACCEPT CWI PERMISSIONS STATEMENT AND DISCLAIMER ---------------------------------------- Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, The Netherlands. All rights reserved. 
Permission to use, copy, modify, and distribute this software and its documentation for any purpose and without fee is hereby granted, provided that the above copyright notice appear in all copies and that both that copyright notice and this permission notice appear in supporting documentation, and that the name of Stichting Mathematisch Centrum or CWI not be used in advertising or publicity pertaining to distribution of the software without specific, written prior permission. STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.PenguinTV-4.2.0/feedparser/feedparser.py0000755000000000000000000032765210646750254015127 0ustar #!/usr/bin/env python """Universal feed parser Handles RSS 0.9x, RSS 1.0, RSS 2.0, CDF, Atom feeds Visit http://feedparser.org/ for the latest version Visit http://feedparser.org/docs/ for the latest documentation Required: Python 2.1 or later Recommended: Python 2.3 or later Recommended: CJKCodecs and iconv_codec """ #__version__ = "pre-3.3-" + "$Revision: 1.1 $"[11:15] + "-cvs" __version__ = "3.3" __license__ = "Python" __copyright__ = "Copyright 2002-4, Mark Pilgrim" __author__ = "Mark Pilgrim " __contributors__ = ["Jason Diamond ", "John Beimler ", "Fazal Majid ", "Aaron Swartz "] _debug = 0 # HTTP "User-Agent" header to send to servers when downloading feeds. # If you are embedding feedparser in a larger application, you should # change this to your application name and URL. USER_AGENT = "UniversalFeedParser/%s +http://feedparser.org/" % __version__ # HTTP "Accept" header to send to servers when downloading feeds. If you don't # want to send an Accept header, set this to None. ACCEPT_HEADER = "application/atom+xml,application/rdf+xml,application/rss+xml,application/x-netcdf,application/xml;q=0.9,text/xml;q=0.2,*/*;q=0.1" # List of preferred XML parsers, by SAX driver name. These will be tried first, # but if they're not installed, Python will keep searching through its own list # of pre-installed parsers until it finds one that supports everything we need. PREFERRED_XML_PARSERS = ["drv_libxml2"] # If you want feedparser to automatically run HTML markup through HTML Tidy, set # this to 1. This is off by default because of reports of crashing on some # platforms. If it crashes for you, please submit a bug report with your OS # platform, Python version, and the URL of the feed you were attempting to parse. # Requires mxTidy TIDY_MARKUP = 0 # ---------- required modules (should come with any Python distribution) ---------- import sgmllib, re, sys, copy, urlparse, time, rfc822, types, cgi try: from cStringIO import StringIO as _StringIO except: from StringIO import StringIO as _StringIO # ---------- optional modules (feedparser will work without these, but with reduced functionality) ---------- # gzip is included with most Python distributions, but may not be available if you compiled your own try: import gzip except: gzip = None try: import zlib except: zlib = None # timeoutsocket allows feedparser to time out rather than hang forever on ultra-slow servers. 
# Python 2.3 now has this functionality available in the standard socket library, so under # 2.3 you don't need to install anything. But you probably should anyway, because the socket # module is buggy and timeoutsocket is better. try: import timeoutsocket # http://www.timo-tasi.org/python/timeoutsocket.py timeoutsocket.setDefaultSocketTimeout(20) except ImportError: import socket if hasattr(socket, 'setdefaulttimeout'): socket.setdefaulttimeout(20) import urllib, urllib2 _mxtidy = None if TIDY_MARKUP: try: from mx.Tidy import Tidy as _mxtidy except: pass # If a real XML parser is available, feedparser will attempt to use it. feedparser has # been tested with the built-in SAX parser, PyXML, and libxml2. On platforms where the # Python distribution does not come with an XML parser (such as Mac OS X 10.2 and some # versions of FreeBSD), feedparser will quietly fall back on regex-based parsing. try: import xml.sax xml.sax.make_parser(PREFERRED_XML_PARSERS) # test for valid parsers from xml.sax.saxutils import escape as _xmlescape _XML_AVAILABLE = 1 except: _XML_AVAILABLE = 0 def _xmlescape(data): data = data.replace("&", "&") data = data.replace(">", ">") data = data.replace("<", "<") return data # base64 support for Atom feeds that contain embedded binary data try: import base64, binascii except: base64 = binascii = None # cjkcodecs and iconv_codec provide support for more character encodings. # Both are available from http://cjkpython.i18n.org/ try: import cjkcodecs.aliases except: pass try: import iconv_codec except: pass # ---------- don't touch these ---------- class CharacterEncodingOverride(Exception): pass class CharacterEncodingUnknown(Exception): pass class NonXMLContentType(Exception): pass sgmllib.tagfind = re.compile('[a-zA-Z][-_.:a-zA-Z0-9]*') sgmllib.special = re.compile('" % (tag, "".join([' %s="%s"' % t for t in attrs])), escape=0) # match namespaces if tag.find(':') <> -1: prefix, suffix = tag.split(':', 1) else: prefix, suffix = '', tag prefix = self.namespacemap.get(prefix, prefix) if prefix: prefix = prefix + '_' # special hack for better tracking of empty textinput/image elements in illformed feeds if (not prefix) and tag not in ('title', 'link', 'description', 'name'): self.intextinput = 0 if (not prefix) and tag not in ('title', 'link', 'description', 'url', 'width', 'height'): self.inimage = 0 # call special handler (if defined) or default handler methodname = '_start_' + prefix + suffix try: method = getattr(self, methodname) return method(attrsD) except AttributeError: return self.push(prefix + suffix, 1) def unknown_endtag(self, tag): if _debug: sys.stderr.write('end %s\n' % tag) # match namespaces if tag.find(':') <> -1: prefix, suffix = tag.split(':', 1) else: prefix, suffix = '', tag prefix = self.namespacemap.get(prefix, prefix) if prefix: prefix = prefix + '_' # call special handler (if defined) or default handler methodname = '_end_' + prefix + suffix try: method = getattr(self, methodname) method() except AttributeError: self.pop(prefix + suffix) # track inline content if self.incontent and self.contentparams.get('mode') == 'escaped': # element declared itself as escaped markup, but it isn't really self.contentparams['mode'] = 'xml' if self.incontent and self.contentparams.get('mode') == 'xml': tag = tag.split(':')[-1] self.handle_data("" % tag, escape=0) # track xml:base and xml:lang going out of scope if self.basestack: self.basestack.pop() if self.basestack and self.basestack[-1]: self.baseuri = self.basestack[-1] if self.langstack: self.langstack.pop() 
if self.langstack: # and (self.langstack[-1] is not None): self.lang = self.langstack[-1] def handle_charref(self, ref): # called for each character reference, e.g. for " ", ref will be "160" if not self.elementstack: return ref = ref.lower() if ref in ('34', '38', '39', '60', '62', 'x22', 'x26', 'x27', 'x3c', 'x3e'): text = "&#%s;" % ref else: if ref[0] == 'x': c = int(ref[1:], 16) else: c = int(ref) text = unichr(c).encode('utf-8') self.elementstack[-1][2].append(text) def handle_entityref(self, ref): # called for each entity reference, e.g. for "©", ref will be "copy" if not self.elementstack: return if _debug: sys.stderr.write("entering handle_entityref with %s\n" % ref) if ref in ('lt', 'gt', 'quot', 'amp', 'apos'): text = '&%s;' % ref else: # entity resolution graciously donated by Aaron Swartz def name2cp(k): import htmlentitydefs if hasattr(htmlentitydefs, "name2codepoint"): # requires Python 2.3 return htmlentitydefs.name2codepoint[k] k = htmlentitydefs.entitydefs[k] if k.startswith("&#") and k.endswith(";"): return int(k[2:-1]) # not in latin-1 return ord(k) try: name2cp(ref) except KeyError: text = "&%s;" % ref else: text = unichr(name2cp(ref)).encode('utf-8') self.elementstack[-1][2].append(text) def handle_data(self, text, escape=1): # called for each block of plain text, i.e. outside of any tag and # not containing any character or entity references if not self.elementstack: return if escape and self.contentparams.get('mode') == 'xml': text = _xmlescape(text) self.elementstack[-1][2].append(text) def handle_comment(self, text): # called for each comment, e.g. pass def handle_pi(self, text): # called for each processing instruction, e.g. pass def handle_decl(self, text): pass def parse_declaration(self, i): # override internal declaration handler to handle CDATA blocks if _debug: sys.stderr.write("entering parse_declaration\n") if self.rawdata[i:i+9] == '', i) if k == -1: k = len(self.rawdata) self.handle_data(_xmlescape(self.rawdata[i+9:k]), 0) return k+3 else: k = self.rawdata.find('>', i) return k+1 def trackNamespace(self, prefix, uri): if (prefix, uri) == (None, 'http://my.netscape.com/rdf/simple/0.9/') and not self.version: self.version = 'rss090' if uri == 'http://purl.org/rss/1.0/' and not self.version: self.version = 'rss10' if not prefix: return if uri.find('backend.userland.com/rss') <> -1: # match any backend.userland.com namespace uri = 'http://backend.userland.com/rss' if self.namespaces.has_key(uri): self.namespacemap[prefix] = self.namespaces[uri] def resolveURI(self, uri): return urlparse.urljoin(self.baseuri or '', uri) def decodeEntities(self, element, data): return data def push(self, element, expectingText): self.elementstack.append([element, expectingText, []]) def pop(self, element): if not self.elementstack: return if self.elementstack[-1][0] != element: return element, expectingText, pieces = self.elementstack.pop() output = "".join(pieces) output = output.strip() if not expectingText: return output # decode base64 content if self.contentparams.get('mode') == 'base64' and base64: try: output = base64.decodestring(output) except binascii.Error: pass except binascii.Incomplete: pass # resolve relative URIs if (element in self.can_be_relative_uri) and output: output = self.resolveURI(output) # decode entities within embedded markup output = self.decodeEntities(element, output) # resolve relative URIs within embedded markup if self.contentparams.get('type', 'text/html') in self.html_types: if element in self.can_contain_relative_uris: output = 
_resolveRelativeURIs(output, self.baseuri, self.encoding) # sanitize embedded markup if self.contentparams.get('type', 'text/html') in self.html_types: if element in self.can_contain_dangerous_markup: output = _sanitizeHTML(output, self.encoding) if self.encoding and (type(output) == types.StringType): try: output = unicode(output, self.encoding) except: pass # store output in appropriate place(s) if self.inentry: if element == 'content': self.entries[-1].setdefault(element, []) contentparams = copy.deepcopy(self.contentparams) contentparams['value'] = output self.entries[-1][element].append(contentparams) elif element == 'category': self.entries[-1][element] = output domain = self.entries[-1]['categories'][-1][0] self.entries[-1]['categories'][-1] = (domain, output) elif element == 'source': self.entries[-1]['source']['value'] = output elif element == 'link': self.entries[-1][element] = output if output: self.entries[-1]['links'][-1]['href'] = output else: if element == 'description': element = 'summary' self.entries[-1][element] = output if self.incontent: contentparams = copy.deepcopy(self.contentparams) contentparams['value'] = output self.entries[-1][element + '_detail'] = contentparams elif self.infeed and (not self.intextinput) and (not self.inimage): if element == 'description': element = 'tagline' self.feeddata[element] = output if element == 'category': domain = self.feeddata['categories'][-1][0] self.feeddata['categories'][-1] = (domain, output) elif element == 'link': self.feeddata['links'][-1]['href'] = output elif self.incontent: contentparams = copy.deepcopy(self.contentparams) contentparams['value'] = output self.feeddata[element + '_detail'] = contentparams return output def _mapToStandardPrefix(self, name): colonpos = name.find(':') if colonpos <> -1: prefix = name[:colonpos] suffix = name[colonpos+1:] prefix = self.namespacemap.get(prefix, prefix) name = prefix + ':' + suffix return name def _getAttribute(self, attrsD, name): return attrsD.get(self._mapToStandardPrefix(name)) def _save(self, key, value): if self.inentry: self.entries[-1].setdefault(key, value) elif self.feeddata: self.feeddata.setdefault(key, value) def _start_rss(self, attrsD): versionmap = {'0.91': 'rss091u', '0.92': 'rss092', '0.93': 'rss093', '0.94': 'rss094'} if not self.version: attr_version = attrsD.get('version', '') version = versionmap.get(attr_version) if version: self.version = version elif attr_version.startswith('2.'): self.version = 'rss20' else: self.version = 'rss' def _start_dlhottitles(self, attrsD): self.version = 'hotrss' def _start_channel(self, attrsD): self.infeed = 1 self._cdf_common(attrsD) _start_feedinfo = _start_channel def _cdf_common(self, attrsD): if attrsD.has_key('lastmod'): self._start_modified({}) self.elementstack[-1][-1] = attrsD['lastmod'] self._end_modified() if attrsD.has_key('href'): self._start_link({}) self.elementstack[-1][-1] = attrsD['href'] self._end_link() def _start_feed(self, attrsD): self.infeed = 1 versionmap = {'0.1': 'atom01', '0.2': 'atom02', '0.3': 'atom03'} if not self.version: attr_version = attrsD.get('version') version = versionmap.get(attr_version) if version: self.version = version else: self.version = 'atom' def _end_channel(self): self.infeed = 0 _end_feed = _end_channel def _start_image(self, attrsD): self.inimage = 1 self.push('image', 0) context = self._getContext() context.setdefault('image', FeedParserDict()) def _end_image(self): self.pop('image') self.inimage = 0 def _start_textinput(self, attrsD): self.intextinput = 1 
self.push('textinput', 0) context = self._getContext() context.setdefault('textinput', FeedParserDict()) _start_textInput = _start_textinput def _end_textinput(self): self.pop('textinput') self.intextinput = 0 _end_textInput = _end_textinput def _start_author(self, attrsD): self.inauthor = 1 self.push('author', 1) _start_managingeditor = _start_author _start_dc_author = _start_author _start_dc_creator = _start_author def _end_author(self): self.pop('author') self.inauthor = 0 self._sync_author_detail() _end_managingeditor = _end_author _end_dc_author = _end_author _end_dc_creator = _end_author def _start_contributor(self, attrsD): self.incontributor = 1 context = self._getContext() context.setdefault('contributors', []) context['contributors'].append(FeedParserDict()) self.push('contributor', 0) def _end_contributor(self): self.pop('contributor') self.incontributor = 0 def _start_name(self, attrsD): self.push('name', 0) def _end_name(self): value = self.pop('name') if self.inauthor: self._save_author('name', value) elif self.incontributor: self._save_contributor('name', value) elif self.intextinput: context = self._getContext() context['textinput']['name'] = value def _start_width(self, attrsD): self.push('width', 0) def _end_width(self): value = self.pop('width') try: value = int(value) except: value = 0 if self.inimage: context = self._getContext() context['image']['width'] = value def _start_height(self, attrsD): self.push('height', 0) def _end_height(self): value = self.pop('height') try: value = int(value) except: value = 0 if self.inimage: context = self._getContext() context['image']['height'] = value def _start_url(self, attrsD): self.push('url', 1) _start_homepage = _start_url _start_uri = _start_url def _end_url(self): value = self.pop('url') if self.inauthor: self._save_author('url', value) elif self.incontributor: self._save_contributor('url', value) elif self.inimage: context = self._getContext() context['image']['url'] = value elif self.intextinput: context = self._getContext() context['textinput']['link'] = value _end_homepage = _end_url _end_uri = _end_url def _start_email(self, attrsD): self.push('email', 0) def _end_email(self): value = self.pop('email') if self.inauthor: self._save_author('email', value) elif self.incontributor: self._save_contributor('email', value) pass def _getContext(self): if self.inentry: context = self.entries[-1] else: context = self.feeddata return context def _save_author(self, key, value): context = self._getContext() context.setdefault('author_detail', FeedParserDict()) context['author_detail'][key] = value self._sync_author_detail() def _save_contributor(self, key, value): context = self._getContext() context.setdefault('contributors', [FeedParserDict()]) context['contributors'][-1][key] = value def _sync_author_detail(self, key='author'): context = self._getContext() detail = context.get('%s_detail' % key) if detail: name = detail.get('name') email = detail.get('email') if name and email: context[key] = "%s (%s)" % (name, email) elif name: context[key] = name elif email: context[key] = email else: author = context.get(key) if not author: return emailmatch = re.search(r"""(([a-zA-Z0-9\_\-\.\+]+)@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.)|(([a-zA-Z0-9\-]+\.)+))([a-zA-Z]{2,4}|[0-9]{1,3})(\]?))""", author) if not emailmatch: return email = emailmatch.group(0) # probably a better way to do the following, but it passes all the tests author = author.replace(email, '') author = author.replace('()', '') author = author.strip() if author and 
(author[0] == '('): author = author[1:] if author and (author[-1] == ')'): author = author[:-1] author = author.strip() context.setdefault('%s_detail' % key, FeedParserDict()) context['%s_detail' % key]['name'] = author context['%s_detail' % key]['email'] = email def _start_tagline(self, attrsD): self.incontent += 1 self.contentparams = FeedParserDict({'mode': attrsD.get('mode', 'escaped'), 'type': attrsD.get('type', 'text/plain'), 'language': self.lang, 'base': self.baseuri}) self.push('tagline', 1) _start_subtitle = _start_tagline def _end_tagline(self): value = self.pop('tagline') self.incontent -= 1 self.contentparams.clear() if self.infeed: self.feeddata['description'] = value _end_subtitle = _end_tagline def _start_copyright(self, attrsD): self.incontent += 1 self.contentparams = FeedParserDict({'mode': attrsD.get('mode', 'escaped'), 'type': attrsD.get('type', 'text/plain'), 'language': self.lang, 'base': self.baseuri}) self.push('copyright', 1) _start_dc_rights = _start_copyright def _end_copyright(self): self.pop('copyright') self.incontent -= 1 self.contentparams.clear() _end_dc_rights = _end_copyright def _start_item(self, attrsD): self.entries.append(FeedParserDict()) self.push('item', 0) self.inentry = 1 self.guidislink = 0 id = self._getAttribute(attrsD, 'rdf:about') if id: context = self._getContext() context['id'] = id self._cdf_common(attrsD) _start_entry = _start_item _start_product = _start_item def _end_item(self): self.pop('item') self.inentry = 0 _end_entry = _end_item def _start_dc_language(self, attrsD): self.push('language', 1) _start_language = _start_dc_language def _end_dc_language(self): self.lang = self.pop('language') _end_language = _end_dc_language def _start_dc_publisher(self, attrsD): self.push('publisher', 1) _start_webmaster = _start_dc_publisher def _end_dc_publisher(self): self.pop('publisher') self._sync_author_detail('publisher') _end_webmaster = _end_dc_publisher def _start_dcterms_issued(self, attrsD): self.push('issued', 1) _start_issued = _start_dcterms_issued def _end_dcterms_issued(self): value = self.pop('issued') self._save('issued_parsed', _parse_date(value)) _end_issued = _end_dcterms_issued def _start_dcterms_created(self, attrsD): self.push('created', 1) _start_created = _start_dcterms_created def _end_dcterms_created(self): value = self.pop('created') self._save('created_parsed', _parse_date(value)) _end_created = _end_dcterms_created def _start_dcterms_modified(self, attrsD): self.push('modified', 1) _start_modified = _start_dcterms_modified _start_dc_date = _start_dcterms_modified _start_pubdate = _start_dcterms_modified def _end_dcterms_modified(self): value = self.pop('modified') parsed_value = _parse_date(value) self._save('modified_parsed', parsed_value) _end_modified = _end_dcterms_modified _end_dc_date = _end_dcterms_modified _end_pubdate = _end_dcterms_modified def _start_expirationdate(self, attrsD): self.push('expired', 1) def _end_expirationdate(self): self._save('expired_parsed', _parse_date(self.pop('expired'))) def _start_cc_license(self, attrsD): self.push('license', 1) value = self._getAttribute(attrsD, 'rdf:resource') if value: self.elementstack[-1][2].append(value) self.pop('license') def _start_creativecommons_license(self, attrsD): self.push('license', 1) def _end_creativecommons_license(self): self.pop('license') def _start_category(self, attrsD): self.push('category', 1) domain = self._getAttribute(attrsD, 'domain') cats = [] if self.inentry: cats = self.entries[-1].setdefault('categories', []) elif self.infeed: 
cats = self.feeddata.setdefault('categories', []) cats.append((domain, None)) _start_dc_subject = _start_category _start_keywords = _start_category def _end_category(self): self.pop('category') _end_dc_subject = _end_category _end_keywords = _end_category def _start_cloud(self, attrsD): self.feeddata['cloud'] = FeedParserDict(attrsD) def _start_link(self, attrsD): attrsD.setdefault('rel', 'alternate') attrsD.setdefault('type', 'text/html') if attrsD.has_key('href'): attrsD['href'] = self.resolveURI(attrsD['href']) expectingText = self.infeed or self.inentry if self.inentry: self.entries[-1].setdefault('links', []) self.entries[-1]['links'].append(FeedParserDict(attrsD)) elif self.infeed: self.feeddata.setdefault('links', []) self.feeddata['links'].append(FeedParserDict(attrsD)) if attrsD.has_key('href'): expectingText = 0 if attrsD.get('type', '') in self.html_types: if self.inentry: self.entries[-1]['link'] = attrsD['href'] elif self.infeed: self.feeddata['link'] = attrsD['href'] else: self.push('link', expectingText) _start_producturl = _start_link def _end_link(self): value = self.pop('link') if self.intextinput: context = self._getContext() context['textinput']['link'] = value if self.inimage: context = self._getContext() context['image']['link'] = value _end_producturl = _end_link def _start_guid(self, attrsD): self.guidislink = (attrsD.get('ispermalink', 'true') == 'true') self.push('id', 1) def _end_guid(self): value = self.pop('id') self._save('guidislink', self.guidislink and not self._getContext().has_key('link')) if self.guidislink: # guid acts as link, but only if "ispermalink" is not present or is "true", # and only if the item doesn't already have a link element self._save('link', value) def _start_id(self, attrsD): self.push('id', 1) def _end_id(self): value = self.pop('id') def _start_title(self, attrsD): self.incontent += 1 if _debug: sys.stderr.write('attrsD.xml:lang = %s\n' % attrsD.get('xml:lang')) if _debug: sys.stderr.write('self.lang = %s\n' % self.lang) self.contentparams = FeedParserDict({'mode': attrsD.get('mode', 'escaped'), 'type': attrsD.get('type', 'text/plain'), 'language': self.lang, 'base': self.baseuri}) self.push('title', self.infeed or self.inentry) _start_dc_title = _start_title def _end_title(self): value = self.pop('title') self.incontent -= 1 self.contentparams.clear() if self.intextinput: context = self._getContext() context['textinput']['title'] = value elif self.inimage: context = self._getContext() context['image']['title'] = value _end_dc_title = _end_title def _start_description(self, attrsD, default_content_type='text/html'): self.incontent += 1 self.contentparams = FeedParserDict({'mode': attrsD.get('mode', 'escaped'), 'type': attrsD.get('type', default_content_type), 'language': self.lang, 'base': self.baseuri}) self.push('description', self.infeed or self.inentry) def _start_abstract(self, attrsD): return self._start_description(attrsD, 'text/plain') def _end_description(self): value = self.pop('description') self.incontent -= 1 self.contentparams.clear() context = self._getContext() if self.intextinput: context['textinput']['description'] = value elif self.inimage: context['image']['description'] = value # elif self.inentry: # context['summary'] = value # elif self.infeed: # context['tagline'] = value _end_abstract = _end_description def _start_info(self, attrsD): self.incontent += 1 self.contentparams = FeedParserDict({'mode': attrsD.get('mode', 'escaped'), 'type': attrsD.get('type', 'text/plain'), 'language': self.lang, 'base': 
self.baseuri}) self.push('info', 1) def _end_info(self): self.pop('info') self.incontent -= 1 self.contentparams.clear() def _start_generator(self, attrsD): if attrsD: if attrsD.has_key('url'): attrsD['url'] = self.resolveURI(attrsD['url']) self.feeddata['generator_detail'] = FeedParserDict(attrsD) self.push('generator', 1) def _end_generator(self): value = self.pop('generator') if self.feeddata.has_key('generator_detail'): self.feeddata['generator_detail']['name'] = value def _start_admin_generatoragent(self, attrsD): self.push('generator', 1) value = self._getAttribute(attrsD, 'rdf:resource') if value: self.elementstack[-1][2].append(value) self.pop('generator') self.feeddata['generator_detail'] = FeedParserDict({"url": value}) def _start_admin_errorreportsto(self, attrsD): self.push('errorreportsto', 1) value = self._getAttribute(attrsD, 'rdf:resource') if value: self.elementstack[-1][2].append(value) self.pop('errorreportsto') def _start_summary(self, attrsD): self.incontent += 1 self.contentparams = FeedParserDict({'mode': attrsD.get('mode', 'escaped'), 'type': attrsD.get('type', 'text/plain'), 'language': self.lang, 'base': self.baseuri}) self.push('summary', 1) def _end_summary(self): value = self.pop('summary') if self.entries: self.entries[-1]['description'] = value self.incontent -= 1 self.contentparams.clear() def _start_enclosure(self, attrsD): if self.inentry: self.entries[-1].setdefault('enclosures', []) self.entries[-1]['enclosures'].append(FeedParserDict(attrsD)) def _start_source(self, attrsD): if self.inentry: self.entries[-1]['source'] = FeedParserDict(attrsD) self.push('source', 1) def _end_source(self): self.pop('source') def _start_content(self, attrsD): self.incontent += 1 self.contentparams = FeedParserDict({'mode': attrsD.get('mode', 'xml'), 'type': attrsD.get('type', 'text/plain'), 'language': self.lang, 'base': self.baseuri}) self.push('content', 1) def _start_prodlink(self, attrsD): self.incontent += 1 self.contentparams = FeedParserDict({'mode': attrsD.get('mode', 'xml'), 'type': attrsD.get('type', 'text/html'), 'language': self.lang, 'base': self.baseuri}) self.push('content', 1) def _start_body(self, attrsD): self.incontent += 1 self.contentparams = FeedParserDict({'mode': 'xml', 'type': 'application/xhtml+xml', 'language': self.lang, 'base': self.baseuri}) self.push('content', 1) _start_xhtml_body = _start_body def _start_content_encoded(self, attrsD): self.incontent += 1 self.contentparams = FeedParserDict({'mode': 'escaped', 'type': 'text/html', 'language': self.lang, 'base': self.baseuri}) self.push('content', 1) _start_fullitem = _start_content_encoded def _end_content(self): value = self.pop('content') if self.contentparams.get('type') in (['text/plain'] + self.html_types): self._save('description', value) self.incontent -= 1 self.contentparams.clear() _end_body = _end_content _end_xhtml_body = _end_content _end_content_encoded = _end_content _end_fullitem = _end_content _end_prodlink = _end_content if _XML_AVAILABLE: class _StrictFeedParser(_FeedParserMixin, xml.sax.handler.ContentHandler): def __init__(self, baseuri, baselang, encoding): if _debug: sys.stderr.write('trying StrictFeedParser\n') xml.sax.handler.ContentHandler.__init__(self) _FeedParserMixin.__init__(self, baseuri, baselang, encoding) self.bozo = 0 self.exc = None def startPrefixMapping(self, prefix, uri): self.trackNamespace(prefix, uri) def startElementNS(self, name, qname, attrs): namespace, localname = name namespace = str(namespace or '') if 
namespace.find('backend.userland.com/rss') <> -1: # match any backend.userland.com namespace namespace = 'http://backend.userland.com/rss' prefix = self.namespaces.get(namespace, 'unknown') if prefix: localname = prefix + ':' + localname localname = str(localname).lower() # qname implementation is horribly broken in Python 2.1 (it # doesn't report any), and slightly broken in Python 2.2 (it # doesn't report the xml: namespace). So we match up namespaces # with a known list first, and then possibly override them with # the qnames the SAX parser gives us (if indeed it gives us any # at all). Thanks to MatejC for helping me test this and # tirelessly telling me that it didn't work yet. attrsD = {} for (namespace, attrlocalname), attrvalue in attrs._attrs.items(): prefix = self.namespaces.get(namespace, '') if prefix: attrlocalname = prefix + ":" + attrlocalname attrsD[str(attrlocalname).lower()] = attrvalue for qname in attrs.getQNames(): attrsD[str(qname).lower()] = attrs.getValueByQName(qname) self.unknown_starttag(localname, attrsD.items()) # def resolveEntity(self, publicId, systemId): # return _StringIO() def characters(self, text): self.handle_data(text) def endElementNS(self, name, qname): namespace, localname = name namespace = str(namespace) prefix = self.namespaces.get(namespace, '') if prefix: localname = prefix + ':' + localname localname = str(localname).lower() self.unknown_endtag(localname) def error(self, exc): self.bozo = 1 self.exc = exc def fatalError(self, exc): self.error(exc) raise exc class _BaseHTMLProcessor(sgmllib.SGMLParser): elements_no_end_tag = ['area', 'base', 'basefont', 'br', 'col', 'frame', 'hr', 'img', 'input', 'isindex', 'link', 'meta', 'param'] def __init__(self, encoding): self.encoding = encoding if _debug: sys.stderr.write('entering BaseHTMLProcessor, encoding=%s\n' % self.encoding) sgmllib.SGMLParser.__init__(self) def reset(self): self.pieces = [] sgmllib.SGMLParser.reset(self) def feed(self, data): data = re.compile(r'', r'<\1>', data) data = data.replace(''', "'") data = data.replace('"', '"') if self.encoding and (type(data) == types.UnicodeType): data = data.encode(self.encoding) sgmllib.SGMLParser.feed(self, data) def normalize_attrs(self, attrs): # utility method to be called by descendants attrs = [(k.lower(), v) for k, v in attrs] # if self.encoding: # if _debug: sys.stderr.write('normalize_attrs, encoding=%s\n' % self.encoding) # attrs = [(k, v.encode(self.encoding)) for k, v in attrs] attrs = [(k, k in ('rel', 'type') and v.lower() or v) for k, v in attrs] return attrs def unknown_starttag(self, tag, attrs): # called for each start tag # attrs is a list of (attr, value) tuples # e.g. for
, tag="pre", attrs=[("class", "screen")]
        if _debug: sys.stderr.write('_BaseHTMLProcessor, unknown_starttag, tag=%s\n' % tag)
        strattrs = "".join([' %s="%s"' % (key, value) for key, value in attrs])
        if tag in self.elements_no_end_tag:
            self.pieces.append("<%(tag)s%(strattrs)s />" % locals())
        else:
            self.pieces.append("<%(tag)s%(strattrs)s>" % locals())
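        # Illustrative example (not in the original source): a minimal sketch
        # of how this default handler round-trips markup, assuming the
        # processor is fed simple HTML:
        #
        #     p = _BaseHTMLProcessor('utf-8')
        #     p.feed('<img src="a.png"><p class="x">hi</p>')
        #     p.output()  # -> '<img src="a.png" /><p class="x">hi</p>'
        #
        # 'img' is listed in elements_no_end_tag, so its start tag is
        # reconstructed in self-closing form; all other tags are rebuilt
        # verbatim from their (attr, value) pairs.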
        
    def unknown_endtag(self, tag):
        # called for each end tag, e.g. for </pre>
, tag will be "pre" # Reconstruct the original end tag. if tag not in self.elements_no_end_tag: self.pieces.append("" % locals()) def handle_charref(self, ref): # called for each character reference, e.g. for " ", ref will be "160" # Reconstruct the original character reference. self.pieces.append("&#%(ref)s;" % locals()) def handle_entityref(self, ref): # called for each entity reference, e.g. for "©", ref will be "copy" # Reconstruct the original entity reference. self.pieces.append("&%(ref)s;" % locals()) def handle_data(self, text): # called for each block of plain text, i.e. outside of any tag and # not containing any character or entity references # Store the original text verbatim. if _debug: sys.stderr.write('_BaseHTMLProcessor, handle_text, text=%s\n' % text) self.pieces.append(text) def handle_comment(self, text): # called for each HTML comment, e.g. # Reconstruct the original comment. self.pieces.append("" % locals()) def handle_pi(self, text): # called for each processing instruction, e.g. # Reconstruct original processing instruction. self.pieces.append("" % locals()) def handle_decl(self, text): # called for the DOCTYPE, if present, e.g. # # Reconstruct original DOCTYPE self.pieces.append("" % locals()) _new_declname_match = re.compile(r'[a-zA-Z][-_.a-zA-Z0-9:]*\s*').match def _scan_name(self, i, declstartpos): rawdata = self.rawdata n = len(rawdata) if i == n: return None, -1 m = self._new_declname_match(rawdata, i) if m: s = m.group() name = s.strip() if (i + len(s)) == n: return None, -1 # end of buffer return name.lower(), m.end() else: self.handle_data(rawdata) # self.updatepos(declstartpos, i) return None, -1 def output(self): """Return processed HTML as a single string""" return "".join([str(p) for p in self.pieces]) class _LooseFeedParser(_FeedParserMixin, _BaseHTMLProcessor): def __init__(self, baseuri, baselang, encoding): sgmllib.SGMLParser.__init__(self) _FeedParserMixin.__init__(self, baseuri, baselang, encoding) def decodeEntities(self, element, data): data = data.replace('<', '<') data = data.replace('<', '<') data = data.replace('>', '>') data = data.replace('>', '>') data = data.replace('&', '&') data = data.replace('&', '&') data = data.replace('"', '"') data = data.replace('"', '"') data = data.replace(''', ''') data = data.replace(''', ''') if self.contentparams.get('mode') == 'escaped': data = data.replace('<', '<') data = data.replace('>', '>') data = data.replace('&', '&') data = data.replace('"', '"') data = data.replace(''', "'") return data class _RelativeURIResolver(_BaseHTMLProcessor): relative_uris = [('a', 'href'), ('applet', 'codebase'), ('area', 'href'), ('blockquote', 'cite'), ('body', 'background'), ('del', 'cite'), ('form', 'action'), ('frame', 'longdesc'), ('frame', 'src'), ('iframe', 'longdesc'), ('iframe', 'src'), ('head', 'profile'), ('img', 'longdesc'), ('img', 'src'), ('img', 'usemap'), ('input', 'src'), ('input', 'usemap'), ('ins', 'cite'), ('link', 'href'), ('object', 'classid'), ('object', 'codebase'), ('object', 'data'), ('object', 'usemap'), ('q', 'cite'), ('script', 'src')] def __init__(self, baseuri, encoding): _BaseHTMLProcessor.__init__(self, encoding) self.baseuri = baseuri def resolveURI(self, uri): return urlparse.urljoin(self.baseuri, uri) def unknown_starttag(self, tag, attrs): attrs = self.normalize_attrs(attrs) attrs = [(key, ((tag, key) in self.relative_uris) and self.resolveURI(value) or value) for key, value in attrs] _BaseHTMLProcessor.unknown_starttag(self, tag, attrs) def _resolveRelativeURIs(htmlSource, baseURI, 
encoding): if _debug: sys.stderr.write("entering _resolveRelativeURIs\n") p = _RelativeURIResolver(baseURI, encoding) p.feed(htmlSource) return p.output() class _HTMLSanitizer(_BaseHTMLProcessor): acceptable_elements = ['a', 'abbr', 'acronym', 'address', 'area', 'b', 'big', 'blockquote', 'br', 'button', 'caption', 'center', 'cite', 'code', 'col', 'colgroup', 'dd', 'del', 'dfn', 'dir', 'div', 'dl', 'dt', 'em', 'fieldset', 'font', 'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'hr', 'i', 'img', 'input', 'ins', 'kbd', 'label', 'legend', 'li', 'map', 'menu', 'ol', 'optgroup', 'option', 'p', 'pre', 'q', 's', 'samp', 'select', 'small', 'span', 'strike', 'strong', 'sub', 'sup', 'table', 'tbody', 'td', 'textarea', 'tfoot', 'th', 'thead', 'tr', 'tt', 'u', 'ul', 'var'] acceptable_attributes = ['abbr', 'accept', 'accept-charset', 'accesskey', 'action', 'align', 'alt', 'axis', 'border', 'cellpadding', 'cellspacing', 'char', 'charoff', 'charset', 'checked', 'cite', 'class', 'clear', 'cols', 'colspan', 'color', 'compact', 'coords', 'datetime', 'dir', 'disabled', 'enctype', 'for', 'frame', 'headers', 'height', 'href', 'hreflang', 'hspace', 'id', 'ismap', 'label', 'lang', 'longdesc', 'maxlength', 'media', 'method', 'multiple', 'name', 'nohref', 'noshade', 'nowrap', 'prompt', 'readonly', 'rel', 'rev', 'rows', 'rowspan', 'rules', 'scope', 'selected', 'shape', 'size', 'span', 'src', 'start', 'summary', 'tabindex', 'target', 'title', 'type', 'usemap', 'valign', 'value', 'vspace', 'width'] unacceptable_elements_with_end_tag = ['script', 'applet'] def reset(self): _BaseHTMLProcessor.reset(self) self.unacceptablestack = 0 def unknown_starttag(self, tag, attrs): if not tag in self.acceptable_elements: if tag in self.unacceptable_elements_with_end_tag: self.unacceptablestack += 1 return attrs = self.normalize_attrs(attrs) attrs = [(key, value) for key, value in attrs if key in self.acceptable_attributes] _BaseHTMLProcessor.unknown_starttag(self, tag, attrs) def unknown_endtag(self, tag): if not tag in self.acceptable_elements: if tag in self.unacceptable_elements_with_end_tag: self.unacceptablestack -= 1 return _BaseHTMLProcessor.unknown_endtag(self, tag) def handle_pi(self, text): pass def handle_decl(self, text): pass def handle_data(self, text): if not self.unacceptablestack: _BaseHTMLProcessor.handle_data(self, text) def _sanitizeHTML(htmlSource, encoding): p = _HTMLSanitizer(encoding) p.feed(htmlSource) data = p.output() if _mxtidy and TIDY_MARKUP: nerrors, nwarnings, data, errordata = _mxtidy.tidy(data, output_xhtml=1, numeric_entities=1, wrap=0) if data.count(''): data = data.split('>', 1)[1] if data.count(' stream This function lets you define parsers that take any input source (URL, pathname to local or network file, or actual data as a string) and deal with it in a uniform manner. Returned object is guaranteed to have all the basic stdio read methods (read, readline, readlines). Just .close() the object when you're done with it. If the etag argument is supplied, it will be used as the value of an If-None-Match request header. If the modified argument is supplied, it must be a tuple of 9 integers as returned by gmtime() in the standard Python time module. This MUST be in GMT (Greenwich Mean Time). The formatted date/time will be used as the value of an If-Modified-Since request header. If the agent argument is supplied, it will be used as the value of a User-Agent request header. If the referrer argument is supplied, it will be used as the value of a Referer[sic] request header. 
    If handlers is supplied, it is a list of handlers used to build a urllib2
    opener.
    """

    if hasattr(url_file_stream_or_string, "read"):
        return url_file_stream_or_string

    if url_file_stream_or_string == "-":
        return sys.stdin

    if urlparse.urlparse(url_file_stream_or_string)[0] in ('http', 'https', 'ftp'):
        if not agent:
            agent = USER_AGENT
        # test for inline user:password for basic auth
        auth = None
        if base64:
            urltype, rest = urllib.splittype(url_file_stream_or_string)
            realhost, rest = urllib.splithost(rest)
            if realhost:
                user_passwd, realhost = urllib.splituser(realhost)
                if user_passwd:
                    url_file_stream_or_string = "%s://%s%s" % (urltype, realhost, rest)
                    auth = base64.encodestring(user_passwd).strip()
        # try to open with urllib2 (to use optional headers)
        request = urllib2.Request(url_file_stream_or_string)
        request.add_header("User-Agent", agent)
        if etag:
            request.add_header("If-None-Match", etag)
        if modified:
            # format into an RFC 1123-compliant timestamp. We can't use
            # time.strftime() since the %a and %b directives can be affected
            # by the current locale, but RFC 2616 states that dates must be
            # in English.
            short_weekdays = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
            months = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
            request.add_header("If-Modified-Since", "%s, %02d %s %04d %02d:%02d:%02d GMT" % (short_weekdays[modified[6]], modified[2], months[modified[1] - 1], modified[0], modified[3], modified[4], modified[5]))
        if referrer:
            request.add_header("Referer", referrer)
        if gzip and zlib:
            request.add_header("Accept-encoding", "gzip, deflate")
        elif gzip:
            request.add_header("Accept-encoding", "gzip")
        elif zlib:
            request.add_header("Accept-encoding", "deflate")
        else:
            request.add_header("Accept-encoding", "")
        if auth:
            request.add_header("Authorization", "Basic %s" % auth)
        if ACCEPT_HEADER:
            request.add_header("Accept", ACCEPT_HEADER)
        opener = apply(urllib2.build_opener, tuple([_FeedURLHandler()] + handlers))
        opener.addheaders = [] # RMK - must clear so we only send our custom User-Agent
        try:
            return opener.open(request)
        finally:
            opener.close() # JohnD

    # try to open with native open function (if url_file_stream_or_string is a filename)
    try:
        return open(url_file_stream_or_string)
    except:
        pass

    # treat url_file_stream_or_string as string
    return _StringIO(str(url_file_stream_or_string))

_date_handlers = []
def registerDateHandler(func):
    """Register a date handler function (takes string, returns 9-tuple date in GMT)"""
    _date_handlers.insert(0, func)

# ISO-8601 date parsing routines written by Fazal Majid.
# The ISO 8601 standard is very convoluted and irregular - a full ISO 8601
# parser is beyond the scope of feedparser and would be a worthwhile addition
# to the Python library.
# A single regular expression cannot parse ISO 8601 date formats into groups
# as the standard is highly irregular (for instance is 030104 2003-01-04 or
# 0301-04-01), so we use templates instead.
# Please note the order in templates is significant because we need a
# greedy match.
_iso8601_tmpl = ['YYYY-?MM-?DD', 'YYYY-MM', 'YYYY-?OOO',
                'YY-?MM-?DD', 'YY-?OOO', 'YYYY',
                '-YY-?MM', '-OOO', '-YY',
                '--MM-?DD', '--MM',
                '---DD',
                'CC', '']
_iso8601_re = [
    tmpl.replace(
    'YYYY', r'(?P<year>\d{4})').replace(
    'YY', r'(?P<year>\d\d)').replace(
    'MM', r'(?P<month>[01]\d)').replace(
    'DD', r'(?P<day>[0123]\d)').replace(
    'OOO', r'(?P<ordinal>[0123]\d\d)').replace(
    'CC', r'(?P<century>\d\d$)')
    + r'(T?(?P<hour>\d{2}):(?P<minute>\d{2})'
    + r'(:(?P<second>\d{2}))?'
    + r'(?P<tz>[+-](?P<tzhour>\d{2})(:(?P<tzmin>\d{2}))?|Z)?)?'
for tmpl in _iso8601_tmpl] del tmpl _iso8601_matches = [re.compile(regex).match for regex in _iso8601_re] del regex def _parse_date_iso8601(dateString): """Parse a variety of ISO-8601-compatible formats like 20040105""" m = None for _iso8601_match in _iso8601_matches: m = _iso8601_match(dateString) if m: break if not m: return if m.span() == (0, 0): return params = m.groupdict() ordinal = params.get("ordinal", 0) if ordinal: ordinal = int(ordinal) else: ordinal = 0 year = params.get("year", "--") if not year or year == "--": year = time.gmtime()[0] elif len(year) == 2: # ISO 8601 assumes current century, i.e. 93 -> 2093, NOT 1993 year = 100 * int(time.gmtime()[0] / 100) + int(year) else: year = int(year) month = params.get("month", "-") if not month or month == "-": # ordinals are NOT normalized by mktime, we simulate them # by setting month=1, day=ordinal if ordinal: month = 1 else: month = time.gmtime()[1] month = int(month) day = params.get("day", 0) if not day: # see above if ordinal: day = ordinal elif params.get("century", 0) or \ params.get("year", 0) or params.get("month", 0): day = 1 else: day = time.gmtime()[2] else: day = int(day) # special case of the century - is the first year of the 21st century # 2000 or 2001 ? The debate goes on... if "century" in params.keys(): year = (int(params["century"]) - 1) * 100 + 1 # in ISO 8601 most fields are optional for field in ["hour", "minute", "second", "tzhour", "tzmin"]: if not params.get(field, None): params[field] = 0 hour = int(params.get("hour", 0)) minute = int(params.get("minute", 0)) second = int(params.get("second", 0)) # weekday is normalized by mktime(), we can ignore it weekday = 0 # daylight savings is complex, but not needed for feedparser's purposes # as time zones, if specified, include mention of whether it is active # (e.g. PST vs. PDT, CET). Using -1 is implementation-dependent and # and most implementations have DST bugs daylight_savings_flag = 0 tm = [year, month, day, hour, minute, second, weekday, ordinal, daylight_savings_flag] # ISO 8601 time zone adjustments tz = params.get("tz") if tz and tz != "Z": if tz[0] == "-": tm[3] += int(params.get("tzhour", 0)) tm[4] += int(params.get("tzmin", 0)) elif tz[0] == "+": tm[3] -= int(params.get("tzhour", 0)) tm[4] -= int(params.get("tzmin", 0)) else: return None # Python's time.mktime() is a wrapper around the ANSI C mktime(3c) # which is guaranteed to normalize d/m/y/h/m/s. # Many implementations have bugs, but we'll pretend they don't. return time.localtime(time.mktime(tm)) registerDateHandler(_parse_date_iso8601) # 8-bit date handling routines written by ytrewq1. 
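# --- Editor's illustrative sketch (not part of the original feedparser source;
# the handler name and the DD/MM/YYYY format are hypothetical). As the
# registerDateHandler() docstring above says, a handler takes a date string and
# returns a 9-tuple date in GMT; returning None lets the next handler try:
def _example_parse_date_dmy(dateString):
    '''Parse a naive DD/MM/YYYY date into a 9-tuple in GMT (illustrative only)'''
    m = re.match(r'(\d{2})/(\d{2})/(\d{4})$', dateString)
    if not m:
        return None
    day, month, year = int(m.group(1)), int(m.group(2)), int(m.group(3))
    # mktime() treats the tuple as local time and normalizes it; gmtime()
    # converts the resulting epoch back to a GMT 9-tuple
    return time.gmtime(time.mktime((year, month, day, 0, 0, 0, 0, 0, -1)))
# A caller would opt in with: registerDateHandler(_example_parse_date_dmy)
# --- end of editor's sketch; the 8-bit date handlers announced above follow ---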
_korean_year = u'\ub144' # b3e2 in euc-kr _korean_month = u'\uc6d4' # bff9 in euc-kr _korean_day = u'\uc77c' # c0cf in euc-kr _korean_am = u'\uc624\uc804' # bfc0 c0fc in euc-kr _korean_pm = u'\uc624\ud6c4' # bfc0 c8c4 in euc-kr _korean_onblog_date_re = \ re.compile('(\d{4})%s\s+(\d{2})%s\s+(\d{2})%s\s+(\d{2}):(\d{2}):(\d{2})' % \ (_korean_year, _korean_month, _korean_day)) _korean_nate_date_re = \ re.compile(u'(\d{4})-(\d{2})-(\d{2})\s+(%s|%s)\s+(\d{,2}):(\d{,2}):(\d{,2})' % \ (_korean_am, _korean_pm)) def _parse_date_onblog(dateString): """Parse a string according to the OnBlog 8-bit date format""" m = _korean_onblog_date_re.match(dateString) if not m: return w3dtfdate = "%(year)s-%(month)s-%(day)sT%(hour)s:%(minute)s:%(second)s%(zonediff)s" % \ {'year': m.group(1), 'month': m.group(2), 'day': m.group(3),\ 'hour': m.group(4), 'minute': m.group(5), 'second': m.group(6),\ 'zonediff': '+09:00'} if _debug: sys.stderr.write("OnBlog date parsed as: %s\n" % w3dtfdate) return _parse_date_w3dtf(w3dtfdate) registerDateHandler(_parse_date_onblog) def _parse_date_nate(dateString): """Parse a string according to the Nate 8-bit date format""" m = _korean_nate_date_re.match(dateString) if not m: return hour = int(m.group(5)) ampm = m.group(4) if (ampm == _korean_pm): hour += 12 hour = str(hour) if len(hour) == 1: hour = '0' + hour w3dtfdate = "%(year)s-%(month)s-%(day)sT%(hour)s:%(minute)s:%(second)s%(zonediff)s" % \ {'year': m.group(1), 'month': m.group(2), 'day': m.group(3),\ 'hour': hour, 'minute': m.group(6), 'second': m.group(7),\ 'zonediff': '+09:00'} if _debug: sys.stderr.write("Nate date parsed as: %s\n" % w3dtfdate) return _parse_date_w3dtf(w3dtfdate) registerDateHandler(_parse_date_nate) _mssql_date_re = \ re.compile('(\d{4})-(\d{2})-(\d{2})\s+(\d{2}):(\d{2}):(\d{2})\.\d+') def _parse_date_mssql(dateString): """Parse a string according to the MS SQL date format""" m = _mssql_date_re.match(dateString) if not m: return w3dtfdate = "%(year)s-%(month)s-%(day)sT%(hour)s:%(minute)s:%(second)s%(zonediff)s" % \ {'year': m.group(1), 'month': m.group(2), 'day': m.group(3),\ 'hour': m.group(4), 'minute': m.group(5), 'second': m.group(6),\ 'zonediff': '+09:00'} if _debug: sys.stderr.write("MS SQL date parsed as: %s\n" % w3dtfdate) return _parse_date_w3dtf(w3dtfdate) registerDateHandler(_parse_date_mssql) # Unicode strings for Greek date strings _greek_months = \ { \ u'\u0399\u03b1\u03bd': u'Jan', # c9e1ed in iso-8859-7 u'\u03a6\u03b5\u03b2': u'Feb', # d6e5e2 in iso-8859-7 u'\u039c\u03ac\u03ce': u'Mar', # ccdcfe in iso-8859-7 u'\u039c\u03b1\u03ce': u'Mar', # cce1fe in iso-8859-7 u'\u0391\u03c0\u03c1': u'Apr', # c1f0f1 in iso-8859-7 u'\u039c\u03ac\u03b9': u'May', # ccdce9 in iso-8859-7 u'\u039c\u03b1\u03ca': u'May', # cce1fa in iso-8859-7 u'\u039c\u03b1\u03b9': u'May', # cce1e9 in iso-8859-7 u'\u0399\u03bf\u03cd\u03bd': u'Jun', # c9effded in iso-8859-7 u'\u0399\u03bf\u03bd': u'Jun', # c9efed in iso-8859-7 u'\u0399\u03bf\u03cd\u03bb': u'Jul', # c9effdeb in iso-8859-7 u'\u0399\u03bf\u03bb': u'Jul', # c9f9eb in iso-8859-7 u'\u0391\u03cd\u03b3': u'Aug', # c1fde3 in iso-8859-7 u'\u0391\u03c5\u03b3': u'Aug', # c1f5e3 in iso-8859-7 u'\u03a3\u03b5\u03c0': u'Sep', # d3e5f0 in iso-8859-7 u'\u039f\u03ba\u03c4': u'Oct', # cfeaf4 in iso-8859-7 u'\u039d\u03bf\u03ad': u'Nov', # cdefdd in iso-8859-7 u'\u039d\u03bf\u03b5': u'Nov', # cdefe5 in iso-8859-7 u'\u0394\u03b5\u03ba': u'Dec', # c4e5ea in iso-8859-7 } _greek_wdays = \ { \ u'\u039a\u03c5\u03c1': u'Sun', # caf5f1 in iso-8859-7 u'\u0394\u03b5\u03c5': u'Mon', # c4e5f5 in 
iso-8859-7 u'\u03a4\u03c1\u03b9': u'Tue', # d4f1e9 in iso-8859-7 u'\u03a4\u03b5\u03c4': u'Wed', # d4e5f4 in iso-8859-7 u'\u03a0\u03b5\u03bc': u'Thu', # d0e5ec in iso-8859-7 u'\u03a0\u03b1\u03c1': u'Fri', # d0e1f1 in iso-8859-7 u'\u03a3\u03b1\u03b2': u'Sat', # d3e1e2 in iso-8859-7 } _greek_date_format_re = \ re.compile(u'([^,]+),\s+(\d{2})\s+([^\s]+)\s+(\d{4})\s+(\d{2}):(\d{2}):(\d{2})\s+([^\s]+)') def _parse_date_greek(dateString): """Parse a string according to a Greek 8-bit date format.""" m = _greek_date_format_re.match(dateString) if not m: return try: wday = _greek_wdays[m.group(1)] month = _greek_months[m.group(3)] except: return rfc822date = "%(wday)s, %(day)s %(month)s %(year)s %(hour)s:%(minute)s:%(second)s %(zonediff)s" % \ {'wday': wday, 'day': m.group(2), 'month': month, 'year': m.group(4),\ 'hour': m.group(5), 'minute': m.group(6), 'second': m.group(7),\ 'zonediff': m.group(8)} if _debug: sys.stderr.write("Greek date parsed as: %s\n" % rfc822date) return _parse_date_rfc822(rfc822date) registerDateHandler(_parse_date_greek) # Unicode strings for Hungarian date strings _hungarian_months = \ { \ u'janu\u00e1r': u'01', # e1 in iso-8859-2 u'febru\u00e1ri': u'02', # e1 in iso-8859-2 u'm\u00e1rcius': u'03', # e1 in iso-8859-2 u'\u00e1prilis': u'04', # e1 in iso-8859-2 u'm\u00e1ujus': u'05', # e1 in iso-8859-2 u'j\u00fanius': u'06', # fa in iso-8859-2 u'j\u00falius': u'07', # fa in iso-8859-2 u'augusztus': u'08', u'szeptember': u'09', u'okt\u00f3ber': u'10', # f3 in iso-8859-2 u'november': u'11', u'december': u'12', } _hungarian_date_format_re = \ re.compile(u'(\d{4})-([^-]+)-(\d{,2})T(\d{,2}):(\d{2})((\+|-)(\d{,2}:\d{2}))') def _parse_date_hungarian(dateString): """Parse a string according to a Hungarian 8-bit date format.""" m = _hungarian_date_format_re.match(dateString) if not m: return try: month = _hungarian_months[m.group(2)] day = m.group(3) if len(day) == 1: day = '0' + day hour = m.group(4) if len(hour) == 1: hour = '0' + hour except: return w3dtfdate = "%(year)s-%(month)s-%(day)sT%(hour)s:%(minute)s%(zonediff)s" % \ {'year': m.group(1), 'month': month, 'day': day,\ 'hour': hour, 'minute': m.group(5),\ 'zonediff': m.group(6)} if _debug: sys.stderr.write("Hungarian date parsed as: %s\n" % w3dtfdate) return _parse_date_w3dtf(w3dtfdate) registerDateHandler(_parse_date_hungarian) # W3DTF-style date parsing adapted from PyXML xml.utils.iso8601, written by # Drake and licensed under the Python license. 
Removed all range checking # for month, day, hour, minute, and second, since mktime will normalize # these later def _parse_date_w3dtf(dateString): def __extract_date(m): year = int(m.group("year")) if year < 100: year = 100 * int(time.gmtime()[0] / 100) + int(year) if year < 1000: return 0, 0, 0 julian = m.group("julian") if julian: julian = int(julian) month = julian / 30 + 1 day = julian % 30 + 1 jday = None while jday != julian: t = time.mktime((year, month, day, 0, 0, 0, 0, 0, 0)) jday = time.gmtime(t)[-2] diff = abs(jday - julian) if jday > julian: if diff < day: day = day - diff else: month = month - 1 day = 31 elif jday < julian: if day + diff < 28: day = day + diff else: month = month + 1 return year, month, day month = m.group("month") day = 1 if month is None: month = 1 else: month = int(month) day = m.group("day") if day: day = int(day) else: day = 1 return year, month, day def __extract_time(m): if not m: return 0, 0, 0 hours = m.group("hours") if not hours: return 0, 0, 0 hours = int(hours) minutes = int(m.group("minutes")) seconds = m.group("seconds") if seconds: seconds = int(seconds) else: seconds = 0 return hours, minutes, seconds def __extract_tzd(m): """Return the Time Zone Designator as an offset in seconds from UTC.""" if not m: return 0 tzd = m.group("tzd") if not tzd: return 0 if tzd == "Z": return 0 hours = int(m.group("tzdhours")) minutes = m.group("tzdminutes") if minutes: minutes = int(minutes) else: minutes = 0 offset = (hours*60 + minutes) * 60 if tzd[0] == "+": return -offset return offset __date_re = ("(?P<year>\d\d\d\d)" "(?:(?P<dsep>-|)" "(?:(?P<julian>\d\d\d)" "|(?P<month>\d\d)(?:(?P=dsep)(?P<day>\d\d))?))?") __tzd_re = "(?P<tzd>[-+](?P<tzdhours>\d\d)(?::?(?P<tzdminutes>\d\d))|Z)" __tzd_rx = re.compile(__tzd_re) __time_re = ("(?P<hours>\d\d)(?P<tsep>:|)(?P<minutes>\d\d)" "(?:(?P=tsep)(?P<seconds>\d\d(?:[.,]\d+)?))?" + __tzd_re) __datetime_re = "%s(?:T%s)?" % (__date_re, __time_re) __datetime_rx = re.compile(__datetime_re) m = __datetime_rx.match(dateString) if (m is None) or (m.group() != dateString): return gmt = __extract_date(m) + __extract_time(m) + (0, 0, 0) if gmt[0] == 0: return return time.gmtime(time.mktime(gmt) + __extract_tzd(m) - time.timezone) registerDateHandler(_parse_date_w3dtf) def _parse_date_rfc822(dateString): """Parse an RFC822, RFC1123, RFC2822, or asctime-style date""" tm = rfc822.parsedate_tz(dateString) if tm: return time.gmtime(rfc822.mktime_tz(tm)) # rfc822.py defines several time zones, but we define some extra ones. # "ET" is equivalent to "EST", etc. _additional_timezones = {'AT': -400, 'ET': -500, 'CT': -600, 'MT': -700, 'PT': -800} rfc822._timezones.update(_additional_timezones) registerDateHandler(_parse_date_rfc822) def _parse_date(dateString): """Parses a variety of date formats into a 9-tuple in GMT""" for handler in _date_handlers: try: date9tuple = handler(dateString) if not date9tuple: continue if len(date9tuple) != 9: if _debug: sys.stderr.write("date handler function must return 9-tuple\n") raise ValueError map(int, date9tuple) return date9tuple except Exception, e: if _debug: sys.stderr.write("%s raised %s\n" % (handler.__name__, repr(e))) pass return None def _getCharacterEncoding(http_headers, xml_data): """Get the character encoding of the XML document http_headers is a dictionary xml_data is a raw string (not Unicode) This is so much trickier than it sounds, it's not even funny. 
According to RFC 3023 ("XML Media Types"), if the HTTP Content-Type is application/xml, application/*+xml, application/xml-external-parsed-entity, or application/xml-dtd, the encoding given in the charset parameter of the HTTP Content-Type takes precedence over the encoding given in the XML prefix within the document, and defaults to "utf-8" if neither are specified. But, if the HTTP Content-Type is text/xml, text/*+xml, or text/xml-external-parsed-entity, the encoding given in the XML prefix within the document is ALWAYS IGNORED and only the encoding given in the charset parameter of the HTTP Content-Type header should be respected, and it defaults to "us-ascii" if not specified. Furthermore, discussion on the atom-syntax mailing list with the author of RFC 3023 leads me to the conclusion that any document served with a Content-Type of text/* and no charset parameter must be treated as us-ascii. (We now do this.) And also that it must always be flagged as non-well-formed. (We now do this too.) If Content-Type is unspecified (input was local file or non-HTTP source) or unrecognized (server just got it totally wrong), then go by the encoding given in the XML prefix of the document and default to "iso-8859-1" as per the HTTP specification (RFC 2616). Then, assuming we didn't find a character encoding in the HTTP headers (and the HTTP Content-type allowed us to look in the body), we need to sniff the first few bytes of the XML data and try to determine whether the encoding is ASCII-compatible. Section F of the XML specification shows the way here: http://www.w3.org/TR/REC-xml/#sec-guessing-no-ext-info If the sniffed encoding is not ASCII-compatible, we need to make it ASCII compatible so that we can sniff further into the XML declaration to find the encoding attribute, which will tell us the true encoding. Of course, none of this guarantees that we will be able to parse the feed in the declared character encoding (assuming it was declared correctly, which many are not). CJKCodecs and iconv_codec help a lot; you should definitely install them if you can. http://cjkpython.i18n.org/ """ def _parseHTTPContentType(content_type): """takes HTTP Content-Type header and returns (content type, charset) If no charset is specified, returns (content type, '') If no content type is specified, returns ('', '') Both return parameters are guaranteed to be lowercase strings """ content_type = content_type or '' content_type, params = cgi.parse_header(content_type) return content_type, params.get('charset', '').replace("'", "") sniffed_xml_encoding = '' xml_encoding = '' true_encoding = '' http_content_type, http_encoding = _parseHTTPContentType(http_headers.get("content-type")) # Must sniff for non-ASCII-compatible character encodings before # searching for XML declaration. 
This heuristic is defined in # section F of the XML specification: # http://www.w3.org/TR/REC-xml/#sec-guessing-no-ext-info try: if xml_data[:4] == '\x4c\x6f\xa7\x94': # EBCDIC xml_data = _ebcdic_to_ascii(xml_data) elif xml_data[:4] == '\x00\x3c\x00\x3f': # UTF-16BE sniffed_xml_encoding = 'utf-16be' xml_data = unicode(xml_data, 'utf-16be').encode('utf-8') elif (len(xml_data) >= 4) and (xml_data[:2] == '\xfe\xff') and (xml_data[2:4] != '\x00\x00'): # UTF-16BE with BOM sniffed_xml_encoding = 'utf-16be' xml_data = unicode(xml_data[2:], 'utf-16be').encode('utf-8') elif xml_data[:4] == '\x3c\x00\x3f\x00': # UTF-16LE sniffed_xml_encoding = 'utf-16le' xml_data = unicode(xml_data, 'utf-16le').encode('utf-8') elif (len(xml_data) >= 4) and (xml_data[:2] == '\xff\xfe') and (xml_data[2:4] != '\x00\x00'): # UTF-16LE with BOM sniffed_xml_encoding = 'utf-16le' xml_data = unicode(xml_data[2:], 'utf-16le').encode('utf-8') elif xml_data[:4] == '\x00\x00\x00\x3c': # UTF-32BE sniffed_xml_encoding = 'utf-32be' xml_data = unicode(xml_data, 'utf-32be').encode('utf-8') elif xml_data[:4] == '\x3c\x00\x00\x00': # UTF-32LE sniffed_xml_encoding = 'utf-32le' xml_data = unicode(xml_data, 'utf-32le').encode('utf-8') elif xml_data[:4] == '\x00\x00\xfe\xff': # UTF-32BE with BOM sniffed_xml_encoding = 'utf-32be' xml_data = unicode(xml_data[4:], 'utf-32be').encode('utf-8') elif xml_data[:4] == '\xff\xfe\x00\x00': # UTF-32LE with BOM sniffed_xml_encoding = 'utf-32le' xml_data = unicode(xml_data[4:], 'utf-32le').encode('utf-8') elif xml_data[:3] == '\xef\xbb\xbf': # UTF-8 with BOM sniffed_xml_encoding = 'utf-8' xml_data = unicode(xml_data[3:], 'utf-8').encode('utf-8') else: # ASCII-compatible pass xml_encoding_match = re.compile('^<\?.*encoding=[\'"](.*?)[\'"].*\?>').match(xml_data) except: xml_encoding_match = None if xml_encoding_match: xml_encoding = xml_encoding_match.groups()[0].lower() if sniffed_xml_encoding and (xml_encoding in ('iso-10646-ucs-2', 'ucs-2', 'csunicode', 'iso-10646-ucs-4', 'ucs-4', 'csucs4', 'utf-16', 'utf-32', 'utf_16', 'utf_32', 'utf16', 'u16')): xml_encoding = sniffed_xml_encoding acceptable_content_type = 0 application_content_types = ('application/xml', 'application/xml-dtd', 'application/xml-external-parsed-entity') text_content_types = ('text/xml', 'text/xml-external-parsed-entity') if (http_content_type in application_content_types) or \ (http_content_type.startswith('application/') and http_content_type.endswith('+xml')): acceptable_content_type = 1 true_encoding = http_encoding or xml_encoding or 'utf-8' elif (http_content_type in text_content_types) or \ (http_content_type.startswith('text/')) and http_content_type.endswith('+xml'): acceptable_content_type = 1 true_encoding = http_encoding or 'us-ascii' elif http_content_type.startswith('text/'): true_encoding = http_encoding or 'us-ascii' elif http_headers and (not http_headers.has_key('content-type')): true_encoding = xml_encoding or 'iso-8859-1' else: true_encoding = xml_encoding or 'utf-8' return true_encoding, http_encoding, xml_encoding, sniffed_xml_encoding, acceptable_content_type def _toUTF8(data, encoding): """Changes an XML data stream on the fly to specify a new encoding data is a raw sequence of bytes (not Unicode) that is presumed to be in %encoding already encoding is a string recognized by encodings.aliases """ if _debug: sys.stderr.write('entering _toUTF8, trying encoding %s\n' % encoding) # strip Byte Order Mark (if present) if (len(data) >= 4) and (data[:2] == '\xfe\xff') and (data[2:4] != '\x00\x00'): if _debug: 
sys.stderr.write('stripping BOM\n') if encoding != 'utf-16be': sys.stderr.write('trying utf-16be instead\n') encoding = 'utf-16be' data = data[2:] elif (len(data) >= 4) and (data[:2] == '\xff\xfe') and (data[2:4] != '\x00\x00'): if _debug: sys.stderr.write('stripping BOM\n') if encoding != 'utf-16le': sys.stderr.write('trying utf-16le instead\n') encoding = 'utf-16le' data = data[2:] elif data[:3] == '\xef\xbb\xbf': if _debug: sys.stderr.write('stripping BOM\n') if encoding != 'utf-8': sys.stderr.write('trying utf-8 instead\n') encoding = 'utf-8' data = data[3:] elif data[:4] == '\x00\x00\xfe\xff': if _debug: sys.stderr.write('stripping BOM\n') if encoding != 'utf-32be': sys.stderr.write('trying utf-32be instead\n') encoding = 'utf-32be' data = data[4:] elif data[:4] == '\xff\xfe\x00\x00': if _debug: sys.stderr.write('stripping BOM\n') if encoding != 'utf-32le': sys.stderr.write('trying utf-32le instead\n') encoding = 'utf-32le' data = data[4:] newdata = unicode(data, encoding) if _debug: sys.stderr.write('successfully converted %s data to unicode\n' % encoding) declmatch = re.compile('^<\?xml[^>]*?>') newdecl = """<?xml version='1.0' encoding='utf-8'?>""" if declmatch.search(newdata): newdata = declmatch.sub(newdecl, newdata) else: newdata = newdecl + u'\n' + newdata return newdata.encode("utf-8") def _stripDoctype(data): """Strips DOCTYPE from XML document, returns (rss_version, stripped_data) rss_version may be "rss091n" or None stripped_data is the same XML document, minus the DOCTYPE """ entity_pattern = re.compile(r'<!ENTITY([^>]*?)>', re.MULTILINE) data = entity_pattern.sub('', data) doctype_pattern = re.compile(r'<!DOCTYPE([^>]*?)>', re.MULTILINE) doctype_results = doctype_pattern.findall(data) doctype = doctype_results and doctype_results[0] or '' if doctype.lower().count('netscape'): version = 'rss091n' else: version = None data = doctype_pattern.sub('', data) return version, data def parse(url_file_stream_or_string, etag=None, modified=None, agent=None, referrer=None, handlers=[]): """Parse a feed from a URL, file, stream, or string""" result = FeedParserDict() result['feed'] = FeedParserDict() result['entries'] = [] if _XML_AVAILABLE: result['bozo'] = 0 if type(handlers) == types.InstanceType: handlers = [handlers] try: f = _open_resource(url_file_stream_or_string, etag, modified, agent, referrer, handlers) data = f.read() except Exception, e: result['bozo'] = 1 result['bozo_exception'] = e data = '' f = None # if feed is gzip-compressed, decompress it if f and data and hasattr(f, "headers"): if gzip and f.headers.get('content-encoding', '') == 'gzip': try: data = gzip.GzipFile(fileobj=_StringIO(data)).read() except Exception, e: # Some feeds claim to be gzipped but they're not, so # we get garbage. Ideally, we should re-request the # feed without the "Accept-encoding: gzip" header, # but we don't. 
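# (Editor's illustrative note, not part of the original source: a caller that
# hits this case could retry the fetch itself without compression using plain
# urllib2, roughly:
#     req = urllib2.Request(feed_url, headers={'Accept-encoding': ''})
#     raw = urllib2.urlopen(req).read()
# where feed_url is a hypothetical variable; feedparser itself just flags the
# result as bozo below.)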
result['bozo'] = 1 result['bozo_exception'] = e data = '' elif zlib and f.headers.get('content-encoding', '') == 'deflate': try: data = zlib.decompress(data, -zlib.MAX_WBITS) except Exception, e: result['bozo'] = 1 result['bozo_exception'] = e data = '' # save HTTP headers if hasattr(f, "info"): info = f.info() result["etag"] = info.getheader("ETag") last_modified = info.getheader("Last-Modified") if last_modified: result["modified"] = _parse_date(last_modified) if hasattr(f, "url"): result["url"] = f.url result["status"] = 200 if hasattr(f, "status"): result["status"] = f.status if hasattr(f, "headers"): result["headers"] = f.headers.dict if hasattr(f, "close"): f.close() # there are four encodings to keep track of: # - http_encoding is the encoding declared in the Content-Type HTTP header # - xml_encoding is the encoding declared in the ; changed # project name #2.5 - 7/25/2003 - MAP - changed to Python license (all contributors agree); # removed unnecessary urllib code -- urllib2 should always be available anyway; # return actual url, status, and full HTTP headers (as result['url'], # result['status'], and result['headers']) if parsing a remote feed over HTTP -- # this should pass all the HTTP tests at ; # added the latest namespace-of-the-week for RSS 2.0 #2.5.1 - 7/26/2003 - RMK - clear opener.addheaders so we only send our custom # User-Agent (otherwise urllib2 sends two, which confuses some servers) #2.5.2 - 7/28/2003 - MAP - entity-decode inline xml properly; added support for # inline and as used in some RSS 2.0 feeds #2.5.3 - 8/6/2003 - TvdV - patch to track whether we're inside an image or # textInput, and also to return the character encoding (if specified) #2.6 - 1/1/2004 - MAP - dc:author support (MarekK); fixed bug tracking # nested divs within content (JohnD); fixed missing sys import (JohanS); # fixed regular expression to capture XML character encoding (Andrei); # added support for Atom 0.3-style links; fixed bug with textInput tracking; # added support for cloud (MartijnP); added support for multiple # category/dc:subject (MartijnP); normalize content model: "description" gets # description (which can come from description, summary, or full content if no # description), "content" gets dict of base/language/type/value (which can come # from content:encoded, xhtml:body, content, or fullitem); # fixed bug matching arbitrary Userland namespaces; added xml:base and xml:lang # tracking; fixed bug tracking unknown tags; fixed bug tracking content when # element is not in default namespace (like Pocketsoap feed); # resolve relative URLs in link, guid, docs, url, comments, wfw:comment, # wfw:commentRSS; resolve relative URLs within embedded HTML markup in # description, xhtml:body, content, content:encoded, title, subtitle, # summary, info, tagline, and copyright; added support for pingback and # trackback namespaces #2.7 - 1/5/2004 - MAP - really added support for trackback and pingback # namespaces, as opposed to 2.6 when I said I did but didn't really; # sanitize HTML markup within some elements; added mxTidy support (if # installed) to tidy HTML markup within some elements; fixed indentation # bug in _parse_date (FazalM); use socket.setdefaulttimeout if available # (FazalM); universal date parsing and normalization (FazalM): 'created', modified', # 'issued' are parsed into 9-tuple date format and stored in 'created_parsed', # 'modified_parsed', and 'issued_parsed'; 'date' is duplicated in 'modified' # and vice-versa; 'date_parsed' is duplicated in 'modified_parsed' and 
vice-versa #2.7.1 - 1/9/2004 - MAP - fixed bug handling " and '. fixed memory # leak not closing url opener (JohnD); added dc:publisher support (MarekK); # added admin:errorReportsTo support (MarekK); Python 2.1 dict support (MarekK) #2.7.4 - 1/14/2004 - MAP - added workaround for improperly formed
tags in # encoded HTML (skadz); fixed unicode handling in normalize_attrs (ChrisL); # fixed relative URI processing for guid (skadz); added ICBM support; added # base64 support #2.7.5 - 1/15/2004 - MAP - added workaround for malformed DOCTYPE (seen on many # blogspot.com sites); added _debug variable #2.7.6 - 1/16/2004 - MAP - fixed bug with StringIO importing #3.0b3 - 1/23/2004 - MAP - parse entire feed with real XML parser (if available); # added several new supported namespaces; fixed bug tracking naked markup in # description; added support for enclosure; added support for source; re-added # support for cloud which got dropped somehow; added support for expirationDate #3.0b4 - 1/26/2004 - MAP - fixed xml:lang inheritance; fixed multiple bugs tracking # xml:base URI, one for documents that don't define one explicitly and one for # documents that define an outer and an inner xml:base that goes out of scope # before the end of the document #3.0b5 - 1/26/2004 - MAP - fixed bug parsing multiple links at feed level #3.0b6 - 1/27/2004 - MAP - added feed type and version detection, result["version"] # will be one of SUPPORTED_VERSIONS.keys() or empty string if unrecognized; # added support for creativeCommons:license and cc:license; added support for # full Atom content model in title, tagline, info, copyright, summary; fixed bug # with gzip encoding (not always telling server we support it when we do) #3.0b7 - 1/28/2004 - MAP - support Atom-style author element in author_detail # (dictionary of "name", "url", "email"); map author to author_detail if author # contains name + email address #3.0b8 - 1/28/2004 - MAP - added support for contributor #3.0b9 - 1/29/2004 - MAP - fixed check for presence of dict function; added # support for summary #3.0b10 - 1/31/2004 - MAP - incorporated ISO-8601 date parsing routines from # xml.util.iso8601 #3.0b11 - 2/2/2004 - MAP - added 'rights' to list of elements that can contain # dangerous markup; fiddled with decodeEntities (not right); liberalized # date parsing even further #3.0b12 - 2/6/2004 - MAP - fiddled with decodeEntities (still not right); # added support to Atom 0.2 subtitle; added support for Atom content model # in copyright; better sanitizing of dangerous HTML elements with end tags # (script, frameset) #3.0b13 - 2/8/2004 - MAP - better handling of empty HTML tags (br, hr, img, # etc.) in embedded markup, in either HTML or XHTML form (
<br>, <br/>, <br />
) #3.0b14 - 2/8/2004 - MAP - fixed CDATA handling in non-wellformed feeds under # Python 2.1 #3.0b15 - 2/11/2004 - MAP - fixed bug resolving relative links in wfw:commentRSS; # fixed bug capturing author and contributor URL; fixed bug resolving relative # links in author and contributor URL; fixed bug resolvin relative links in # generator URL; added support for recognizing RSS 1.0; passed Simon Fell's # namespace tests, and included them permanently in the test suite with his # permission; fixed namespace handling under Python 2.1 #3.0b16 - 2/12/2004 - MAP - fixed support for RSS 0.90 (broken in b15) #3.0b17 - 2/13/2004 - MAP - determine character encoding as per RFC 3023 #3.0b18 - 2/17/2004 - MAP - always map description to summary_detail (Andrei); # use libxml2 (if available) #3.0b19 - 3/15/2004 - MAP - fixed bug exploding author information when author # name was in parentheses; removed ultra-problematic mxTidy support; patch to # workaround crash in PyXML/expat when encountering invalid entities # (MarkMoraes); support for textinput/textInput #3.0b20 - 4/7/2004 - MAP - added CDF support #3.0b21 - 4/14/2004 - MAP - added Hot RSS support #3.0b22 - 4/19/2004 - MAP - changed 'channel' to 'feed', 'item' to 'entries' in # results dict; changed results dict to allow getting values with results.key # as well as results[key]; work around embedded illformed HTML with half # a DOCTYPE; work around malformed Content-Type header; if character encoding # is wrong, try several common ones before falling back to regexes (if this # works, bozo_exception is set to CharacterEncodingOverride); fixed character # encoding issues in BaseHTMLProcessor by tracking encoding and converting # from Unicode to raw strings before feeding data to sgmllib.SGMLParser; # convert each value in results to Unicode (if possible), even if using # regex-based parsing #3.0b23 - 4/21/2004 - MAP - fixed UnicodeDecodeError for feeds that contain # high-bit characters in attributes in embedded HTML in description (thanks # Thijs van de Vossen); moved guid, date, and date_parsed to mapped keys in # FeedParserDict; tweaked FeedParserDict.has_key to return True if asking # about a mapped key #3.0fc1 - 4/23/2004 - MAP - made results.entries[0].links[0] and # results.entries[0].enclosures[0] into FeedParserDict; fixed typo that could # cause the same encoding to be tried twice (even if it failed the first time); # fixed DOCTYPE stripping when DOCTYPE contained entity declarations; # better textinput and image tracking in illformed RSS 1.0 feeds #3.0fc2 - 5/10/2004 - MAP - added and passed Sam's amp tests; added and passed # my blink tag tests #3.0fc3 - 6/18/2004 - MAP - fixed bug in _changeEncodingDeclaration that # failed to parse utf-16 encoded feeds; made source into a FeedParserDict; # duplicate admin:generatorAgent/@rdf:resource in generator_detail.url; # added support for image; refactored parse() fallback logic to try other # encodings if SAX parsing fails (previously it would only try other encodings # if re-encoding failed); remove unichr madness in normalize_attrs now that # we're properly tracking encoding in and out of BaseHTMLProcessor; set # feed.language from root-level xml:lang; set entry.id from rdf:about; # send Accept header #3.0 - 6/21/2004 - MAP - don't try iso-8859-1 (can't distinguish between # iso-8859-1 and windows-1252 anyway, and most incorrectly marked feeds are # windows-1252); fixed regression that could cause the same encoding to be # tried twice (even if it failed the first time) #3.0.1 - 6/22/2004 - 
MAP - default to us-ascii for all text/* content types; # recover from malformed content-type header parameter with no equals sign # ("text/xml; charset:iso-8859-1") #3.1 - 6/28/2004 - MAP - added and passed tests for converting HTML entities # to Unicode equivalents in illformed feeds (aaronsw); added and # passed tests for converting character entities to Unicode equivalents # in illformed feeds (aaronsw); test for valid parsers when setting # XML_AVAILABLE; make version and encoding available when server returns # a 304; add handlers parameter to pass arbitrary urllib2 handlers (like # digest auth or proxy support); add code to parse username/password # out of url and send as basic authentication; expose downloading-related # exceptions in bozo_exception (aaronsw); added __contains__ method to # FeedParserDict (aaronsw); added publisher_detail (aaronsw) #3.2 - 7/3/2004 - MAP - use cjkcodecs and iconv_codec if available; always # convert feed to UTF-8 before passing to XML parser; completely revamped # logic for determining character encoding and attempting XML parsing # (much faster); increased default timeout to 20 seconds; test for presence # of Location header on redirects; added tests for many alternate character # encodings; support various EBCDIC encodings; support UTF-16BE and # UTF16-LE with or without a BOM; support UTF-8 with a BOM; support # UTF-32BE and UTF-32LE with or without a BOM; fixed crashing bug if no # XML parsers are available; added support for "Content-encoding: deflate"; # send blank "Accept-encoding: " header if neither gzip nor zlib modules # are available #3.3 - 7/15/2004 - MAP - optimize EBCDIC to ASCII conversion; fix obscure # problem tracking xml:base and xml:lang if element declares it, child # doesn't, first grandchild redeclares it, and second grandchild doesn't; # refactored date parsing; defined public registerDateHandler so callers # can add support for additional date formats at runtime; added support # for OnBlog, Nate, MSSQL, Greek, and Hungarian dates (ytrewq1); added # zopeCompatibilityHack() which turns FeedParserDict into a regular # dictionary, required for Zope compatibility, and also makes command- # line debugging easier because pprint module formats real dictionaries # better than dictionary-like objects; added NonXMLContentType exception, # which is stored in bozo_exception when a feed is served with a non-XML # media type such as "text/plain"; respect Content-Language as default # language if not xml:lang is present; cloud dict is now FeedParserDict; # generator dict is now FeedParserDict; better tracking of xml:lang, # including support for xml:lang="" to unset the current language; # recognize RSS 1.0 feeds even when RSS 1.0 namespace is not the default # namespace; don't overwrite final status on redirects (scenarios: # redirecting to a URL that returns 304, redirecting to a URL that # redirects to another URL with a different type of redirect); add # support for HTTP 303 redirects PenguinTV-4.2.0/bin/0000755000000000000000000000000011450721172011032 5ustar PenguinTV-4.2.0/bin/PenguinTV0000775000000000000000000000544011450721611012642 0ustar #!/bin/sh # # Copyright (c) 2008 Owen Williams # You may use and distribute this software under the terms of the # GNU General Public License, version 2 or later # # run-moz code from Conduit (thanks!) 
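# (Editor's note, illustrative and not part of the original script: the logic
# below honors two environment variables, so detection can be overridden by
# hand, e.g.
#   WITH_MOZ_DIR=/usr/lib/xulrunner-1.9 PYTHON=/usr/bin/python2.5 ./bin/PenguinTV
# The xulrunner path above is hypothetical; any directory containing
# run-mozilla.sh will do.)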
## Work around https://bugs.launchpad.net/ubuntu/+source/firefox/+bug/26436 ## Note: This bug is also present in Mandriva and Fedora, for this reason, ## We will use run-mozilla in all cases ## ## Run mozilla takes care of settin up the appropriate environment variables ## to run a program using mozilla libs if [ "$1"x = "--no-launcher"x ] ; then USE_LAUNCHER=0 shift else USE_LAUNCHER=1 fi HILDON=0 RUNMOZ_DIR="" if [ "$PYTHON"x = "x" ] ; then PYTHON="/usr/bin/env python" fi if [ -f "/usr/lib/libgtkembedmoz.so.0" -a -d "/usr/lib/microb-engine" ]; then HILDON=1 echo "Launching Hildon version" #export MOZILLA_FIVE_HOME="/usr/lib/microb-engine" #export LD_LIBRARY_PATH="/usr/lib/microb-engine" else if [ "$WITH_MOZ_DIR"x = "x" ] ; then for d in `ls -d /usr/lib*/firefox* /usr/lib*/mozilla-firefox* /usr/lib*/xulrunner* 2> /dev/null | grep -v addons | grep -v devel | sort -r` do if [ -x $d/run-mozilla.sh ]; then echo INFO: FOUND FIREFOX LIBS AT $d WITH_MOZ_DIR="$d" break fi done fi fi rundir=`dirname $0` cd $rundir if [ -f "./penguintv/penguintv.py" ] ; then ptv_home="./penguintv" elif [ -f "../penguintv/penguintv.py" ] ; then cd .. ptv_home="./penguintv" else ptv_home=`$PYTHON -c "import os, sys; if os.environ.has_key('PENGUINTV_LIB'): print os.environ['PENGUINTV_LIB']; sys.exit(0); for d in [\"$rundir\"] + sys.path: if len(d) > 0: if d[0] == '.': d = os.path.join(os.getcwd(), d); sd = os.path.join(d, 'penguintv'); if os.path.isdir(sd): print sd; sys.exit(0); h, t = os.path.split(os.path.split(os.path.abspath(sys.argv[0]))[0]); if t == 'bin': libdir = os.path.join(h, 'lib'); fp = os.path.join(libdir, 'penguintv'); if os.path.isdir(fp): print libdir; sys.exit(0); sys.exit(1)"` if [ $? -gt 0 ] ; then echo "Error finding PenguinTV library home. Please export PENGUINTV_LIB" exit 1 fi fi echo "Running ptv in $ptv_home" export PTV_PREFIX=$rundir if [ $HILDON -eq 1 ] ; then PYTHONPATH="$ptv_home" cd $ptv_home if [ $USE_LAUNCHER -eq 1 ] ; then run-standalone.sh /usr/bin/python ./penguintv.py $* else run-standalone.sh /usr/bin/python2.5 ./penguintv.py $* fi else PYTHONPATH="$ptv_home" cd $ptv_home if [ "$WITH_MOZ_DIR"x = "x" ] ; then echo "WARNING: COULD NOT FIND FIREFOX LIBRARIES" echo "WARNING: PENGUINTV MAY CRASH UNEXPECTEDLY" echo "Please export WITH_MOZ_DIR with the location of run-mozilla.sh to correct this issue" $PYTHON ./penguintv.py $* else $WITH_MOZ_DIR/run-mozilla.sh $PYTHON ./penguintv.py $* fi fi PenguinTV-4.2.0/debian/0000755000000000000000000000000011450721511011501 5ustar PenguinTV-4.2.0/debian/compat0000644000000000000000000000000211450721445012705 0ustar 5 PenguinTV-4.2.0/debian/01-fix-setup.py.patch0000644000000000000000000001471411450721445015330 0ustar diff -Nur -x '*.orig' -x '*~' penguintv-4.0.0/setup.py penguintv-4.0.0.new/setup.py --- penguintv-4.0.0/setup.py 2009-02-27 03:38:50.000000000 +0100 +++ penguintv-4.0.0.new/setup.py 2009-05-04 19:34:05.000000000 +0200 @@ -2,7 +2,6 @@ #this file is a catastrophe. I'm sorry. 
import sys,os -from penguintv import subProcess as my_subProcess import subprocess try: @@ -21,23 +20,12 @@ try: print "Building OLPC version" - sp = my_subProcess.subProcess("cp -f share/penguintv.glade.olpc share/penguintv.glade") - if sp.read() != 0: - print "There was an error symlinking the glade file" - sys.exit(1) - bundlebuilder.start("NewsReader", manifest='MANIFEST-OLPC') except Exception, e: print "problem building for OLPC:", e sys.exit(1) sys.exit(0) elif HAS_HILDON: - print "Building hildon version" - sp = my_subProcess.subProcess("cp -f share/penguintv.glade.hildon share/penguintv.glade") - if sp.read() != 0: - print "There was an error copying the glade file" - sys.exit(1) -else: print "Building desktop version" sp = my_subProcess.subProcess("cp -f share/penguintv.glade.desktop share/penguintv.glade") @@ -49,7 +37,6 @@ from distutils.extension import Extension import locale, gettext -from penguintv.utils import GlobDirectoryWalker, _mkdir locale.setlocale(locale.LC_ALL, '') gettext.install('penguintv', '/usr/share/locale') gettext.bindtextdomain('penguintv', '/usr/share/locale') @@ -58,36 +45,6 @@ missing_something = [] -try: - import gtkmozembed -except: - #maybe we built gtkmozembed for maemo with build-deb.sh - try: - from penguintv.ptvmozembed import gtkmozembed - except: - print "WARNING: gtkmozembed not found. This is usually provided by a package like python-gnome2-extras or gnome-python2-gtkmozembed" - print " PenguinTV will still run without gtkmozembed, but the experience isn't as good." - #if HAS_HILDON: - # missing_something.append("On Maemo, gtkmozembed is created by running ./build_maemo_deb.sh and creating a package") - -try: - import sqlite3 -except: - try: - from pysqlite2 import dbapi2 as sqlite - except: - missing_something.append("Need pysqlite version 2 or higher (http://pysqlite.org/)") - -try: - import pycurl -except: - missing_something.append("Need pycurl (http://pycurl.sourceforge.net/)") - -try: - import Image -except: - missing_something.append("Need python imaging (http://www.pythonware.com/products/pil/)") - #try: # import gnome #except: @@ -140,17 +97,6 @@ #else: # print "Setting default MOZILLA_FIVE_HOME to", moz_lib_dir -code = subprocess.call(["which","msgfmt"]) -if code != 0: - HAVE_GETTEXT = False - print "Need gettext to generate translations -- disabling translations." 
- #missing_something.append("Need gettext") -else: - HAVE_GETTEXT = True - -if len(missing_something) > 0: - sys.exit("\n".join(missing_something)) - try: os.stat("./bin") except: @@ -168,23 +114,7 @@ f.close() os.chmod("./bin/PenguinTV", 0775) -from penguintv import utils - locales = [] -if HAVE_GETTEXT: - if "build" in sys.argv or "install" in sys.argv: - - for f in GlobDirectoryWalker("./po", "*.po"): - this_locale = os.path.basename(f) - this_locale = this_locale[0:this_locale.rfind('.')] - _mkdir("./mo/"+this_locale+"/LC_MESSAGES") - msgfmt_line = "msgfmt "+f+" -o ./mo/"+this_locale+"/LC_MESSAGES/penguintv.mo" - print msgfmt_line - locales.append(('share/locale/'+this_locale+'/LC_MESSAGES', ['mo/'+this_locale+'/LC_MESSAGES/penguintv.mo'])) - sp = my_subProcess.subProcess(msgfmt_line) - if sp.read() != 0: - print "There was an error building the MO file for locale "+this_locale - sys.exit(1) data_files = [('share/penguintv', ['share/penguintv.glade','share/defaultsubs.opml','share/penguintvicon.png','share/mozilla.css','share/gtkhtml.css','share/mozilla-planet.css','share/mozilla-planet-hildon.css']), ('share/penguintv/glade', ['share/glade/dialogs.glade']), @@ -197,16 +127,7 @@ ('share/dbus-1/services', ['share/penguintv.service'])] data_files += locales -if utils.RUNNING_HILDON: - data_files += [('share/applications/hildon/',['penguintv-hildon.desktop']), - ('share/icons/hicolor/scalable/hildon', ['share/penguintvicon.png']), - ('share/icons/hicolor/64x64/hildon', ['share/pixmaps/64x64/penguintvicon.png']), - ('share/icons/hicolor/40x40/hildon', ['share/pixmaps/40x40/penguintvicon.png']), - ('share/icons/hicolor/26x26/hildon', ['share/pixmaps/26x26/penguintvicon.png']), - ('share/penguintv/glade', ['share/glade/hildon.glade', 'share/glade/hildon_dialogs.glade', - 'share/glade/hildon_dialog_add_feed.glade','share/glade/hildon_planet.glade']),] -else: - data_files += [('share/applications', ['penguintv.desktop']), +data_files += [('share/applications', ['penguintv.desktop']), ('share/icons/hicolor/scalable/apps', ['share/penguintvicon.png']), ('share/icons/hicolor/64x64/apps', ['share/pixmaps/64x64/penguintvicon.png']), ('share/icons/hicolor/40x40/apps', ['share/pixmaps/40x40/penguintvicon.png']), @@ -216,7 +137,7 @@ 'share/glade/planet.glade', 'share/glade/vertical.glade']),] setup(name = "PenguinTV", -version = utils.VERSION, +version = '4.00', description = 'GNOME-compatible podcast and videoblog reader', author = 'Owen Williams', author_email = 'owen-penguintv@ywwg.com', @@ -231,38 +152,3 @@ "penguintv/amazon", "penguintv/BeautifulSoup"]) -if "install" in sys.argv: - sp = my_subProcess.subProcess('''GCONF_CONFIG_SOURCE=$(gconftool-2 --get-default-source) gconftool-2 --makefile-install-rule share/penguintv.schema''') - if sp.read() != 0: - print sp.outdata - print "There was an error installing the gconf schema" - sys.exit(1) - else: - print sp.outdata - -print "" -something_disabled = False - -try: - import gconf -except: - try: - from gnome import gconf - except: - print "WARNING: gconf not installed or not installed correctly: Gconf support will be disabled" - something_disabled = True - -try: - import pygst - pygst.require("0.10") - import gst -except: - print "WARNING: gstreamer .10 or greater not installed or not installed correctly: Built-in player will be disabled" - something_disabled = True - -if something_disabled: - print """If anything above was disabled and you install that library, PenguinTV will detect it automatically - and re-enable support. 
You do not have to reinstall PenguinTV to enable support for these features""" - -if "build" in sys.argv: - print "You can run ./bin/PenguinTV to run PenguinTV" PenguinTV-4.2.0/debian/control0000644000000000000000000000225611450721445013117 0ustar Source: penguintv Section: gnome Priority: optional Maintainer: Ubuntu MOTU Developers Build-Depends: debhelper (>= 5.0.37.2), cdbs, python-dev (>= 2.4), python-central (>= 0.5) XS-Python-Version: current Standards-Version: 3.8.0 Homepage: http://penguintv.sourceforge.net/ Package: penguintv Architecture: all XB-Python-Version: ${python:Versions} Provides: ${python:Provides} Depends: ${python:Depends}, ${misc:Depends}, python-pycurl, python-pysqlite2, python-libxml2, gconf2, python-gnome2-extras, python-glade2, python-pyrex, python-gnome2, python-gtkhtml2, python-dbus, python-feedparser Recommends: python-gst0.10, python-xapian Description: podcasts and video blogs for Linux PenguinTV has a light-weight but powerful interface that shows you what you need to know, and hides what you don't. You can immediately see what you've watched, what's available to view, and which media are downloading. You don't have to worry about where files are stored, what their names are, or what format they are in. Rather than try to reinvent the wheel by coding its own media player, PenguinTV launches the media player of your choice to view your downloads. PenguinTV-4.2.0/debian/manpages0000644000000000000000000000002311450721445013220 0ustar debian/PenguinTV.1 PenguinTV-4.2.0/debian/copyright0000644000000000000000000000246111450721445013445 0ustar This package was debianized by Daniel Holbach on Fri, 13 Jan 2006 15:23:06 +0100. It was downloaded from http://penguintv.sourceforge.net/ Copyright Holder: 2002-2006 Timothy O'Malley Juri Pakaste Owen Williams (c) 2006-2007 Amazon Digital Services, Inc. Licence for PenguinTV.in: You may use and distribute this software under the terms of the GNU General Public License, version 2 or later License for others files: This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see . On Debian systems, the complete text of the GNU General Public License can be found in `/usr/share/common-licenses/GPL'. 
PenguinTV-4.2.0/debian/02-fix-desktop-file.patch0000644000000000000000000000063411450721445016124 0ustar diff -Nur -x '*.orig' -x '*~' penguintv-3.6.5/penguintv.desktop penguintv-3.6.5.new/penguintv.desktop --- penguintv-3.6.5/penguintv.desktop 2008-07-30 17:59:37.000000000 +0200 +++ penguintv-3.6.5.new/penguintv.desktop 2009-03-20 07:44:31.000000000 +0100 @@ -1,5 +1,4 @@ [Desktop Entry] -Encoding=UTF-8 Name=PenguinTV Media Aggregator Comment=Aggregates rss feeds, podcasts, and video blogs TryExec=PenguinTV PenguinTV-4.2.0/debian/03-fix-explicit-python2.5.patch0000644000000000000000000000060411450721445017121 0ustar diff -Nur -x '*.orig' -x '*~' penguintv-3.6.5/penguintv/Poller.py penguintv-3.6.5.new/penguintv/Poller.py --- penguintv-3.6.5/penguintv/Poller.py 2008-07-30 17:59:51.000000000 +0200 +++ penguintv-3.6.5.new/penguintv/Poller.py 2009-03-20 07:53:05.000000000 +0100 @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.5 +#!/usr/bin/python #Out-of-process poller for PenguinTV #returns data over dbus PenguinTV-4.2.0/debian/PenguinTV.10000644000000000000000000000133011450721445013445 0ustar .TH "PenguinTV" "1" .SH "NAME" PenguinTV\- podcasts and video blogs for Linux .SH "SYNOPSIS" \fB .SH "DESCRIPTION" .B PenguinTV has a light-weight but powerful interface that shows you what you need to know, and hides what you don't. You can immediately see what you've watched, what's available to view, and which media are downloading. You don't have to worry about where files are stored, what their names are, or what format they are in. Rather than try to reinvent the wheel by coding its own media player, PenguinTV launches the media player of your choice to view your downloads. .SH "AUTHOR" .PP This manual page was written by Cody A.W. Somerville for the \fBUbuntu GNU/Linux\fP system. PenguinTV-4.2.0/debian/changelog0000644000000000000000000001065311450721605013364 0ustar penguintv (4.2.0) lucid; ugency=low * New upstream release -- Owen Williams Wed, 29 Sep 2010 16:37:00 -0500 penguintv (4.0.0-0ubuntu1) karmic; urgency=low * New upstream release (LP: #363457). * Refresh 03-fix-explicit-python2.5.patch patch. -- Alessio Treglia Mon, 04 May 2009 19:23:37 +0200 penguintv (3.6.5-0ubuntu3) jaunty; urgency=low * debian/rules: - remove /usr/share/pyshared-data/penguintv in the build-tree before runnign dh_pycentral again to force rebuild of the data (LP: #352369) -- Michael Vogt Tue, 31 Mar 2009 17:17:51 +0200 penguintv (3.6.5-0ubuntu2) jaunty; urgency=low [ Fabrice Coutadeur ] * debian/rules: delete feedparser.py file in installation tree to fix a FTBFS (LP: #342116) [ Daniel Holbach ] * debian/control: bump Standards-Version. * debian/patches/02-fix-desktop-file.patch: remod obsolete encoding entry. * debian/patches/03-fix-explicit-python2.5.patch: drop explicit use of python2.5. -- Fabrice Coutadeur Fri, 13 Mar 2009 04:58:03 +0000 penguintv (3.6.5-0ubuntu1) jaunty; urgency=low [ Julien Lavergne ] * New upstream release (LP: #306391) * debian/control - Add python-dbus as Depends. - Add python-feedparser as Depends to link to the Ubuntu feedparser package. - Add python-xapian as Recommends to add xapian search support. - Add python-gst0.10 as Recommends to add internal media playback. * debian/rules - Remove local feedparser.py * debian/patches/01-fix.setup.py.patch: Update. * debian/watch - Change tarbal location * debian/copyright - Licence is now GPL-3+. - Add GPL-2 file. - Add new copyright holder. [ James Westby ] * Change maintainer field to Ubuntu MOTU Developers. 
-- James Westby Thu, 11 Dec 2008 14:27:48 -0800 penguintv (3.0-0ubuntu3) hardy; urgency=low * debian/control: drop python-xml Depends. (LP: #199014) -- Daniel Holbach Fri, 07 Mar 2008 12:49:03 +0100 penguintv (3.0-0ubuntu2) hardy; urgency=low * debian/watch: Added watchfile. * debian/control: - Bumped standards version - Added Homepage field (moved from long description) * debian/PenguinTV.1: Added manpage for PenguinTV * debian/manpages: Added to install new manpage * Closes lp: #182737 -- Cody A.W. Somerville Sun, 13 Jan 2008 21:38:39 -0400 penguintv (3.0-0ubuntu1) gutsy; urgency=low * New upstream release. * debian/patches/01-fix-setup.py.patch: updated. * debian/control: drop a lot of unneeded Build-Depends. -- Daniel Holbach Fri, 08 Jun 2007 16:49:59 +0200 penguintv (2.80-0ubuntu1) feisty; urgency=low * New upstream release. * debian/patches/01-fix-setup.py.patch: - updated. * debian/control: - added python-gtkhtml2 to {Build#,}Depends -- Daniel Holbach Fri, 9 Feb 2007 10:13:27 +0100 penguintv (2.0.2-0ubuntu2) edgy; urgency=low * debian/control: - added python-gnome2 to Depends - thanks Martin Döring -- Daniel Holbach Mon, 11 Sep 2006 09:34:10 +0200 penguintv (2.0.2-0ubuntu1) edgy; urgency=low * New upstream release. (Malone: #52567) * debian/patches/01-fix-setup.py.patch: - updated. -- Daniel Holbach Tue, 22 Aug 2006 14:30:51 +0200 penguintv (1.93-0ubuntu1) edgy; urgency=low * New upstream release. (Malone: #52567) * debian/{rules,control,pycompat}: - use pycentral. -- Daniel Holbach Thu, 27 Jul 2006 02:31:07 +0200 penguintv (1.02-0ubuntu4) dapper; urgency=low * debian/control + Fixed typo. -- Chuck Short Mon, 15 May 2006 10:32:05 -0400 penguintv (1.02-0ubuntu3) dapper; urgency=low * debian/control: + Added dependency on python-glade2. (Closes: Malone #44488) -- Chuck Short Sun, 14 May 2006 15:30:01 -0400 penguintv (1.02-0ubuntu2) dapper; urgency=low * debian/control: - added Dependency on python2.4-gnome2-extras. Thanks Fionn Behrens for letting me know. -- Daniel Holbach Sun, 23 Apr 2006 08:24:44 +0200 penguintv (1.02-0ubuntu1) dapper; urgency=low * Initial release. -- Daniel Holbach Fri, 13 Jan 2006 15:23:06 +0100 PenguinTV-4.2.0/debian/pycompat0000644000000000000000000000000211450721445013256 0ustar 2 PenguinTV-4.2.0/debian/rules0000755000000000000000000000065711450721445012577 0ustar #!/usr/bin/make -f DEB_PYTHON_SYSTEM := pycentral include /usr/share/cdbs/1/rules/debhelper.mk include /usr/share/cdbs/1/class/python-distutils.mk include /usr/share/cdbs/1/rules/simple-patchsys.mk binary-install/penguintv:: rm -f $(DEB_DESTDIR)/usr/share/pyshared/penguintv/feedparser.py rm -f $(DEB_DESTDIR)/usr/share/pyshared-data/penguintv dh_pycentral dh_desktop clean/penguintv:: find . -name '*.pyc' | xargs rm -f PenguinTV-4.2.0/debian/watch0000644000000000000000000000007211450721445012537 0ustar version=3 http://sf.net/penguintv/PenguinTV-(.+)\.tar\.gz PenguinTV-4.2.0/debian/PenguinTV0000644000000000000000000000562711450721445013323 0ustar #!/bin/sh # # Copyright (c) 2008 Owen Williams # You may use and distribute this software under the terms of the # GNU General Public License, version 2 or later # # run-moz code from Conduit (thanks!) 
## Work around https://bugs.launchpad.net/ubuntu/+source/firefox/+bug/26436 ## Note: This bug is also present in Mandriva and Fedora, for this reason, ## We will use run-mozilla in all cases ## ## Run mozilla takes care of settin up the appropriate environment variables ## to run a program using mozilla libs if [ "$1"x = "--no-launcher"x ] ; then USE_LAUNCHER=0 shift else USE_LAUNCHER=1 fi HILDON=0 RUNMOZ_DIR="" if [ "$PYTHON"x = "x" ] ; then PYTHON="/usr/bin/env python" fi if [ -f "/usr/lib/libgtkembedmoz.so.0" -a -d "/usr/lib/microb-engine" ]; then HILDON=1 echo "Launching Hildon version" #export MOZILLA_FIVE_HOME="/usr/lib/microb-engine" #export LD_LIBRARY_PATH="/usr/lib/microb-engine" else if [ "$WITH_MOZ_DIR"x = "x" ] ; then for d in `ls -d /usr/lib*/firefox* /usr/lib*/mozilla-firefox* /usr/lib*/xulrunner* 2> /dev/null | grep -v addons | grep -v devel | sort -r` do if [ -x $d/run-mozilla.sh ]; then echo INFO: FOUND FIREFOX LIBS AT $d WITH_MOZ_DIR="$d" break fi done fi fi rundir=`dirname $0` cd $rundir if [ -f "./penguintv/penguintv.py" ] ; then ptv_home="./penguintv" elif [ -f "../penguintv/penguintv.py" ] ; then cd .. ptv_home="./penguintv" elif [ -d "/usr/lib/python2.5/site-packages/penguintv" ] ; then ptv_home="/usr/lib/python2.5/site-packages/penguintv" else ptv_home=`$PYTHON -c "import os, sys; if os.environ.has_key('PENGUINTV_LIB'): print os.environ['PENGUINTV_LIB']; sys.exit(0); for d in [\"$rundir\"] + sys.path: if len(d) > 0: if d[0] == '.': d = os.path.join(os.getcwd(), d); sd = os.path.join(d, 'penguintv'); if os.path.isdir(sd): print sd; sys.exit(0); h, t = os.path.split(os.path.split(os.path.abspath(sys.argv[0]))[0]); if t == 'bin': libdir = os.path.join(h, 'lib'); fp = os.path.join(libdir, 'penguintv'); if os.path.isdir(fp): print libdir; sys.exit(0); sys.exit(1)"` if [ $? -gt 0 ] ; then echo "Error finding PenguinTV library home. 
Please export PENGUINTV_LIB" exit 1 fi fi echo "Running ptv in $ptv_home" export PTV_PREFIX=$rundir if [ $HILDON -eq 1 ] ; then PYTHONPATH="$ptv_home" cd $ptv_home if [ $USE_LAUNCHER -eq 1 ] ; then run-standalone.sh /usr/bin/python ./penguintv.py $* else run-standalone.sh /usr/bin/python2.5 ./penguintv.py $* fi else PYTHONPATH="$ptv_home" cd $ptv_home if [ "$WITH_MOZ_DIR"x = "x" ] ; then echo "WARNING: COULD NOT FIND FIREFOX LIBRARIES" echo "WARNING: PENGUINTV MAY CRASH UNEXPECTEDLY" echo "Please export WITH_MOZ_DIR with the location of run-mozilla.sh to correct this issue" $PYTHON ./penguintv.py $* else $WITH_MOZ_DIR/run-mozilla.sh $PYTHON ./penguintv.py $* fi fi PenguinTV-4.2.0/MANIFEST0000644000000000000000000000761111303260436011416 0ustar penguintv/ajax/EntryInfoServer.py penguintv/ajax/MyTCPServer.py penguintv/ajax/__init__.py penguintv/ptvbittorrent/DownloaderFeedback.py penguintv/ptvbittorrent/btcompletedir.py penguintv/ptvbittorrent/__init__.py penguintv/ptvbittorrent/spewout.py penguintv/ptvbittorrent/Choker.py penguintv/ptvbittorrent/btmakemetafile.py penguintv/ptvbittorrent/HTTPHandler.py penguintv/ptvbittorrent/testtest.py penguintv/ptvbittorrent/NatCheck.py penguintv/ptvbittorrent/PiecePicker.py penguintv/ptvbittorrent/bitfield.py penguintv/ptvbittorrent/zurllib.py penguintv/ptvbittorrent/StorageWrapper.py penguintv/ptvbittorrent/parseargs.py penguintv/ptvbittorrent/fakeopen.py penguintv/ptvbittorrent/Downloader.py penguintv/ptvbittorrent/bencode.py penguintv/ptvbittorrent/track.py penguintv/ptvbittorrent/btformats.py penguintv/ptvbittorrent/Storage.py penguintv/ptvbittorrent/Rerequester.py penguintv/ptvbittorrent/selectpoll.py penguintv/ptvbittorrent/download.py penguintv/ptvbittorrent/Uploader.py penguintv/ptvbittorrent/Encrypter.py penguintv/ptvbittorrent/RateMeasure.py penguintv/ptvbittorrent/RawServer.py penguintv/ptvbittorrent/Connecter.py penguintv/ptvbittorrent/CurrentRateMeasure.py penguintv/HTTPDownloader.py penguintv/LoginDialog.py penguintv/subProcess.py penguintv/Player.py penguintv/utils.py penguintv/EntryView.py penguintv/AddFeedDialog.py penguintv/AddFeedUtils.py penguintv/EntryList.py penguintv/EntryFormatter.py penguintv/Downloader.py penguintv/FeedList.py penguintv/HildonListener.py penguintv/__init__.py penguintv/OPML.py penguintv/ThreadPool.py penguintv/TagEditorNG.py penguintv/penguintv.py penguintv/MediaManager.py penguintv/MainWindow.py penguintv/ptvDB.py penguintv/UpdateTasksManager.py penguintv/SimpleImageCache.py penguintv/feedparser.py penguintv/PreferencesDialog.py penguintv/BTDownloader.py penguintv/Lucene.py penguintv/FeedPropertiesDialog.py penguintv/AddSearchTagDialog.py penguintv/EditSearchesDialog.py penguintv/FeedFilterDialog.py penguintv/FeedFilterPropertiesDialog.py penguintv/ptv_sync.py penguintv/ptvDbus.py penguintv/GStreamerPlayer.py penguintv/SynchronizeDialog.py penguintv/FilterSelectorDialog.py penguintv/PlanetView.py penguintv/DownloadView.py penguintv/IconManager.py penguintv/PtvTrayIcon.py penguintv/PTVXapian.py penguintv/itunes.py penguintv/Poller.py penguintv/ArticleSync.py penguintv/SqliteSyncClient.py penguintv/S3SyncClient.py penguintv/FtpSyncClient.py penguintv/OfflineImageCache.py penguintv/amazon/S3.py penguintv/amazon/__init__.py penguintv/trayicon/__init__.py penguintv/trayicon/TrayIcon.py penguintv/trayicon/SonataNotification.py penguintv/BeautifulSoup/BeautifulSoup.py penguintv/BeautifulSoup/__init__.py penguintv/html/PTVMozilla.py penguintv/html/PTVGtkHtml.py penguintv/html/PTVhtml.py penguintv/html/__init__.py 
feedparser/setup.py feedparser/feedparser.py feedparser/LICENSE feedparser/README setup.py PenguinTVActivity.py share/penguintv.gladep share/penguintv.glade.h share/penguintv.schema.in share/defaultsubs.opml share/penguintv.glade share/penguintvicon.png share/penguintv.service share/mozilla.css share/mozilla-planet.css share/mozilla-planet-hildon.css share/gtkhtml.css share/pixmaps/ev_online.png share/pixmaps/ev_offline.png share/pixmaps/throbber.gif share/glade/desktop.glade share/glade/dialog_add_feed.glade share/glade/dialogs.glade share/glade/extra_dialogs.glade share/glade/planet.glade share/glade/standard.glade share/glade/vertical.glade share/glade/widescreen.glade share/icons/stock-go-down.svg share/icons/stock-media-pause.svg share/icons/stock-media-play.svg share/icons/stock-preferences.svg share/icons/stock-stop.svg share/pixmaps/26x26/penguintvicon.png share/pixmaps/40x40/penguintvicon.png share/pixmaps/64x64/penguintvicon.png share/pixmaps/PenguinTV icon.png po/penguintv.pot po/pt.po po/pt_BR.po po/sv.po po/make_pot.sh po/install-po.sh PenguinTV.in LICENSE MANIFEST setup.py README penguintv.desktop penguintv-hildon.desktop CONTRIBUTORS ptv_profile.py PenguinTV-4.2.0/README0000644000000000000000000000437511157234125011154 0ustar PenguinTV Thank you for downloading PenguinTV. This application is an RSS feed reader with support for enclosed media: you can watch video blogs and listen to podcasts with it. See the website for new features and fixes in this version. == Requirements == PenguinTV is a python program and requires python version 2.5 or higher. It also requires several libraries and their python bindings: * gnome 2.x * python 2.5 or later * pycurl version 7.11 or greater (http://pycurl.sourceforge.net/) * mozilla support for python (gnome-python2-gtkmozembed or python-gnome2-extras) * python imaging library (http://www.pythonware.com/products/pil/) == Optional Libraries == PenguinTV supports some extra libraries but will run without them: * python-gstreamer for internal media playback * python-xapian for search support == Building == Build with: python setup.py build Building on maemo: inside scratchbox, run ./build_maemo_deb.sh to build a debian package. This will also build gtkmozembed support for python for inclusion in the package. == Running without Installing == After you've built PenguinTV, you can type ./bin/PenguinTV to run the program. == Installing == Installation should be as simple as running python setup.py install as root. Please email me if you have trouble installing the program. == Running PenguinTV == PenguinTV will be installed in the GNOME menus under Internet. If you do not have python-gstreamer installed, PenguinTV will use the GNOME or KDE default media player to open downloaded files. If you want to use PenguinTV to transfer files to a portable media player, you can use the Go / Synchronize Media command. == Crashes == PenguinTV uses the same Mozilla technology as Firefox to display your RSS feeds. Unfortunately, there are bugs in the way some distributions have packaged these libraries, which can cause PenguinTV to crash. If PenguinTV crashes on your machine (for instance, if you get a "segmentation fault" or if a window pops up telling you the program crashed), I'd like to know about it. == Development == Please email me at owen-penguintv@ywwg.com with questions, comments, or suggestions. I would welcome help on packaging, translation, or making PenguinTV work better for you.
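== Environment Variables ==

The launcher script (see PenguinTV.in) honors a few environment variables that are useful for debugging or when its automatic detection fails. The variables below are the ones the script actually reads; the paths are illustrative examples only and will vary by distribution:

    # point the launcher at a specific Mozilla/XULRunner tree (the directory
    # must contain run-mozilla.sh); otherwise it searches /usr/lib*/firefox*,
    # /usr/lib*/mozilla-firefox* and /usr/lib*/xulrunner*
    export WITH_MOZ_DIR=/usr/lib/xulrunner-1.9    # example path

    # tell the launcher where the penguintv python package lives if it
    # cannot locate one on its own
    export PENGUINTV_LIB=/usr/local/lib/python2.5/site-packages    # example path

    # run with a specific python interpreter instead of /usr/bin/env python
    export PYTHON=/usr/bin/python2.5

    ./bin/PenguinTV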
Thanks for watching, Owen Williams PenguinTV-4.2.0/PenguinTV.in0000755000000000000000000000544011177372300012500 0ustar #!/bin/sh # # Copyright (c) 2008 Owen Williams # You may use and distribute this software under the terms of the # GNU General Public License, version 2 or later # # run-moz code from Conduit (thanks!) ## Work around https://bugs.launchpad.net/ubuntu/+source/firefox/+bug/26436 ## Note: This bug is also present in Mandriva and Fedora; for this reason ## we will use run-mozilla in all cases ## ## run-mozilla takes care of setting up the appropriate environment variables ## to run a program using mozilla libs if [ "$1"x = "--no-launcher"x ] ; then USE_LAUNCHER=0 shift else USE_LAUNCHER=1 fi HILDON=0 RUNMOZ_DIR="" if [ "$PYTHON"x = "x" ] ; then PYTHON="/usr/bin/env python" fi if [ -f "/usr/lib/libgtkembedmoz.so.0" -a -d "/usr/lib/microb-engine" ]; then HILDON=1 echo "Launching Hildon version" #export MOZILLA_FIVE_HOME="/usr/lib/microb-engine" #export LD_LIBRARY_PATH="/usr/lib/microb-engine" else if [ "$WITH_MOZ_DIR"x = "x" ] ; then for d in `ls -d /usr/lib*/firefox* /usr/lib*/mozilla-firefox* /usr/lib*/xulrunner* 2> /dev/null | grep -v addons | grep -v devel | sort -r` do if [ -x $d/run-mozilla.sh ]; then echo INFO: FOUND FIREFOX LIBS AT $d WITH_MOZ_DIR="$d" break fi done fi fi rundir=`dirname $0` cd $rundir if [ -f "./penguintv/penguintv.py" ] ; then ptv_home="./penguintv" elif [ -f "../penguintv/penguintv.py" ] ; then cd .. ptv_home="./penguintv" else ptv_home=`$PYTHON -c "import os, sys; if os.environ.has_key('PENGUINTV_LIB'): print os.environ['PENGUINTV_LIB']; sys.exit(0); for d in [\"$rundir\"] + sys.path: if len(d) > 0: if d[0] == '.': d = os.path.join(os.getcwd(), d); sd = os.path.join(d, 'penguintv'); if os.path.isdir(sd): print sd; sys.exit(0); h, t = os.path.split(os.path.split(os.path.abspath(sys.argv[0]))[0]); if t == 'bin': libdir = os.path.join(h, 'lib'); fp = os.path.join(libdir, 'penguintv'); if os.path.isdir(fp): print libdir; sys.exit(0); sys.exit(1)"` if [ $? -gt 0 ] ; then echo "Error finding PenguinTV library home.
Please export PENGUINTV_LIB" exit 1 fi fi echo "Running ptv in $ptv_home" export PTV_PREFIX=$rundir if [ $HILDON -eq 1 ] ; then PYTHONPATH="$ptv_home" cd $ptv_home if [ $USE_LAUNCHER -eq 1 ] ; then run-standalone.sh /usr/bin/python ./penguintv.py $* else run-standalone.sh /usr/bin/python2.5 ./penguintv.py $* fi else PYTHONPATH="$ptv_home" cd $ptv_home if [ "$WITH_MOZ_DIR"x = "x" ] ; then echo "WARNING: COULD NOT FIND FIREFOX LIBRARIES" echo "WARNING: PENGUINTV MAY CRASH UNEXPECTEDLY" echo "Please export WITH_MOZ_DIR with the location of run-mozilla.sh to correct this issue" $PYTHON ./penguintv.py $* else $WITH_MOZ_DIR/run-mozilla.sh $PYTHON ./penguintv.py $* fi fi PenguinTV-4.2.0/PKG-INFO0000644000000000000000000000041011450514774011362 0ustar Metadata-Version: 1.0 Name: PenguinTV Version: 4.2.0 Summary: GNOME-compatible podcast and videoblog reader Home-page: http://penguintv.sourceforge.net Author: Owen Williams Author-email: owen-penguintv@ywwg.com License: GPL Description: UNKNOWN Platform: UNKNOWN PenguinTV-4.2.0/penguintv.desktop0000644000000000000000000000034311303252644013674 0ustar [Desktop Entry] Name=PenguinTV Media Aggregator Comment=Aggregates rss feeds, podcasts, and video blogs TryExec=PenguinTV Exec=PenguinTV %U Terminal=false Type=Application Categories=Application;Network; Icon=penguintvicon.png PenguinTV-4.2.0/penguintv-hildon.desktop0000644000000000000000000000051010752700352015144 0ustar [Desktop Entry] Encoding=UTF-8 Name=PenguinTV Media Aggregator Comment=Aggregates rss feeds, podcasts, and video blogs TryExec=PenguinTV Exec=/usr/bin/PenguinTV Terminal=false Type=Application Categories=Internet Icon=penguintvicon X-Osso-Service=com.ywwg.PenguinTV X-Osso-Type=application/x-executable StartupWMClass=penguintv PenguinTV-4.2.0/setup.py0000644000000000000000000002155011277632704012010 0ustar #!/usr/bin/env python #this file is a catastrophe. I'm sorry. import sys,os from penguintv import subProcess as my_subProcess import subprocess try: from sugar.activity import bundlebuilder HAS_SUGAR = True except: HAS_SUGAR = False try: import hildon HAS_HILDON = True except: HAS_HILDON = False if HAS_SUGAR: try: print "Building OLPC version" sp = my_subProcess.subProcess("cp -f share/penguintv.glade.olpc share/penguintv.glade") if sp.read() != 0: print "There was an error symlinking the glade file" sys.exit(1) bundlebuilder.start("NewsReader", manifest='MANIFEST-OLPC') except Exception, e: print "problem building for OLPC:", e sys.exit(1) sys.exit(0) elif HAS_HILDON: print "Building hildon version" sp = my_subProcess.subProcess("cp -f share/penguintv.glade.hildon share/penguintv.glade") if sp.read() != 0: print "There was an error copying the glade file" sys.exit(1) else: print "Building desktop version" sp = my_subProcess.subProcess("cp -f share/penguintv.glade.desktop share/penguintv.glade") if sp.read() != 0: print "There was an error copying the glade file" sys.exit(1) from distutils.core import setup from distutils.extension import Extension import locale, gettext from penguintv.utils import GlobDirectoryWalker, _mkdir locale.setlocale(locale.LC_ALL, '') gettext.install('penguintv', '/usr/share/locale') gettext.bindtextdomain('penguintv', '/usr/share/locale') gettext.textdomain('penguintv') _=gettext.gettext missing_something = [] try: import gtkmozembed except: #maybe we built gtkmozembed for maemo with build-deb.sh try: from penguintv.ptvmozembed import gtkmozembed except: print "WARNING: gtkmozembed not found. 
This is usually provided by a package like python-gnome2-extras or gnome-python2-gtkmozembed" print " PenguinTV will still run without gtkmozembed, but the experience isn't as good." #if HAS_HILDON: # missing_something.append("On Maemo, gtkmozembed is created by running ./build_maemo_deb.sh and creating a package") try: import sqlite3 except: try: from pysqlite2 import dbapi2 as sqlite except: missing_something.append("Need pysqlite version 2 or higher (http://pysqlite.org/)") try: import pycurl except: missing_something.append("Need pycurl (http://pycurl.sourceforge.net/)") try: import Image except: missing_something.append("Need python imaging (http://www.pythonware.com/products/pil/)") #try: # import gnome #except: # missing_something.append("Need gnome python bindings") #try: # from xml.sax import saxutils # test = saxutils.DefaultHandler #except: # missing_something.append("Need python-xml") #moz_lib_dir = "" #if os.environ.has_key('MOZILLA_FIVE_HOME'): # moz_lib_dir = os.environ['MOZILLA_FIVE_HOME'] #else: # for moz in ("xulrunner-gtkmozembed", "firefox-gtkmozembed", "mozilla-gtkmozembed"): # sp = my_subProcess.subProcess("pkg-config --silence-errors --libs-only-L %s | sed 's/ //g'" % moz) # lib_dir = sp.read() # if lib_dir != 0: # lib_dir = lib_dir.replace("-L", "") # try: # os.stat(lib_dir) # moz_lib_dir = lib_dir # break # except: # pass # #if moz_lib_dir == "": # if os.path.isfile("/usr/lib/libgtkembedmoz.so.0") and \ # os.path.isdir("/usr/lib/microb-engine"): # moz_lib_dir = "/usr/lib/microb-engine" # #if moz_lib_dir == "": # cmd = "ldd " + gtkmozembed.__file__ + "|grep libgtkembedmoz" # p = subprocess.Popen(cmd, shell=True, close_fds=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) # retval = p.wait() # stderr = p.stderr.read() # if retval == 0: # line = p.stdout.read() # lib_dir = line[line.find("/"):line.rfind("libgtkembedmoz.so")] # try: # os.stat(lib_dir) # moz_lib_dir = lib_dir # except: # pass # #if moz_lib_dir == "": # print "Couldn't locate mozilla home. Please set MOZILLA_FIVE_HOME and run setup again" # sys.exit(1) #else: # print "Setting default MOZILLA_FIVE_HOME to", moz_lib_dir code = subprocess.call(["which","msgfmt"]) if code != 0: HAVE_GETTEXT = False print "Need gettext to generate translations -- disabling translations." 
#missing_something.append("Need gettext") else: HAVE_GETTEXT = True if len(missing_something) > 0: sys.exit("\n".join(missing_something)) try: os.stat("./bin") except: try: os.mkdir("./bin") except: print "Error creating ./bin directory for script" sys.exit(1) f = open("PenguinTV.in", "r") f2 = open("./bin/PenguinTV", "w") for line in f.readlines(): #f2.write(line.replace("##MOZ_LIB_DIR##", moz_lib_dir)) f2.write(line) f2.close() f.close() os.chmod("./bin/PenguinTV", 0775) from penguintv import utils locales = [] if HAVE_GETTEXT: if "build" in sys.argv or "install" in sys.argv: for f in GlobDirectoryWalker("./po", "*.po"): this_locale = os.path.basename(f) this_locale = this_locale[0:this_locale.rfind('.')] _mkdir("./mo/"+this_locale+"/LC_MESSAGES") msgfmt_line = "msgfmt "+f+" -o ./mo/"+this_locale+"/LC_MESSAGES/penguintv.mo" print msgfmt_line locales.append(('share/locale/'+this_locale+'/LC_MESSAGES', ['mo/'+this_locale+'/LC_MESSAGES/penguintv.mo'])) sp = my_subProcess.subProcess(msgfmt_line) if sp.read() != 0: print "There was an error building the MO file for locale "+this_locale sys.exit(1) data_files = [('share/penguintv', ['share/penguintv.glade','share/defaultsubs.opml','share/penguintvicon.png','share/mozilla.css','share/gtkhtml.css','share/mozilla-planet.css','share/mozilla-planet-hildon.css']), ('share/penguintv/glade', ['share/glade/dialogs.glade']), ('share/pixmaps', ['share/penguintvicon.png']), ('share/icons/hicolor/scalable/hildon', ['share/penguintvicon.png']), ('share/icons/hicolor/64x64/hildon', ['share/pixmaps/64x64/penguintvicon.png']), ('share/icons/hicolor/40x40/hildon', ['share/pixmaps/40x40/penguintvicon.png']), ('share/icons/hicolor/26x26/hildon', ['share/pixmaps/26x26/penguintvicon.png']), ('share/penguintv/pixmaps', ['share/pixmaps/ev_online.png', 'share/pixmaps/ev_offline.png', 'share/pixmaps/throbber.gif']), ('share/dbus-1/services', ['share/penguintv.service'])] data_files += locales if utils.RUNNING_HILDON: data_files += [('share/applications/hildon/',['penguintv-hildon.desktop']), ('share/icons/hicolor/scalable/hildon', ['share/penguintvicon.png']), ('share/icons/hicolor/64x64/hildon', ['share/pixmaps/64x64/penguintvicon.png']), ('share/icons/hicolor/40x40/hildon', ['share/pixmaps/40x40/penguintvicon.png']), ('share/icons/hicolor/26x26/hildon', ['share/pixmaps/26x26/penguintvicon.png']), ('share/penguintv/glade', ['share/glade/hildon.glade', 'share/glade/hildon_dialogs.glade', 'share/glade/hildon_dialog_add_feed.glade','share/glade/hildon_planet.glade']),] else: data_files += [('share/applications', ['penguintv.desktop']), ('share/icons/hicolor/scalable/apps', ['share/penguintvicon.png']), ('share/icons/hicolor/64x64/apps', ['share/pixmaps/64x64/penguintvicon.png']), ('share/icons/hicolor/40x40/apps', ['share/pixmaps/40x40/penguintvicon.png']), ('share/icons/hicolor/26x26/apps', ['share/pixmaps/26x26/penguintvicon.png']), ('share/penguintv/glade', ['share/glade/desktop.glade', 'share/glade/standard.glade', 'share/glade/widescreen.glade', 'share/glade/dialog_add_feed.glade', 'share/glade/extra_dialogs.glade', 'share/glade/planet.glade', 'share/glade/vertical.glade']),] setup(name = "PenguinTV", version = utils.VERSION, description = 'GNOME-compatible podcast and videoblog reader', author = 'Owen Williams', author_email = 'owen-penguintv@ywwg.com', url = 'http://penguintv.sourceforge.net', license = 'GPL', scripts = ['bin/PenguinTV'], data_files = data_files, packages = ["penguintv", "penguintv/ptvbittorrent", "penguintv/trayicon", "penguintv/ajax", 
"penguintv/amazon", "penguintv/html", "penguintv/BeautifulSoup"]) if "install" in sys.argv: sp = my_subProcess.subProcess('''GCONF_CONFIG_SOURCE=$(gconftool-2 --get-default-source) gconftool-2 --makefile-install-rule share/penguintv.schema''') if sp.read() != 0: print sp.outdata print "There was an error installing the gconf schema" sys.exit(1) else: print sp.outdata print "" something_disabled = False try: import gconf except: try: from gnome import gconf except: print "WARNING: gconf not installed or not installed correctly: Gconf support will be disabled" something_disabled = True try: import pygst pygst.require("0.10") import gst except: print "WARNING: gstreamer .10 or greater not installed or not installed correctly: Built-in player will be disabled" something_disabled = True if something_disabled: print """If anything above was disabled and you install that library, PenguinTV will detect it automatically and re-enable support. You do not have to reinstall PenguinTV to enable support for these features""" if "build" in sys.argv: print "You can run ./bin/PenguinTV to run PenguinTV" PenguinTV-4.2.0/CONTRIBUTORS0000644000000000000000000000012210646750274012150 0ustar Owen Williams Daniel Arbuckle j@v2v.cc PenguinTV-4.2.0/po/0000755000000000000000000000000011450514774010710 5ustar PenguinTV-4.2.0/po/pt.po0000644000000000000000000002603610646750274011705 0ustar # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR ORGANIZATION # FIRST AUTHOR , YEAR. # msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "POT-Creation-Date: 2006-02-25 18:31+EST\n" "PO-Revision-Date: 2006-02-21 20:14+EST\n" "Last-Translator: Susana Pereira \n" "Language-Team: Portuguese \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=CHARSET\n" "Content-Transfer-Encoding: ENCODING\n" "Generated-By: pygettext.py 1.5\n" #: BTDownloader.py:67 BTDownloader.py:68 msgid "There was an error downloading the torrent" msgstr "Ocorreu um erro ao efectuar o download do torrent" #: BTDownloader.py:105 msgid "Downloaded %(progress)s%% of %(size)s, %(time)s remaining." msgstr "Transferiu %(progress)s%% de %(size)s, %(time)s restantes." #: BTDownloader.py:107 msgid "Downloaded %(progress)s%% of %(size)s" msgstr "Transferiu %(progress)s%% de %(size)s" #: EntryList.py:172 #, fuzzy msgid "_Download" msgstr "_Download" #: EntryList.py:182 utils.py:166 msgid "Delete" msgstr "Apagar" #: EntryList.py:186 #, fuzzy msgid "Mark as _Unviewed" msgstr "Marcar como _Não Visto" #: EntryView.py:276 EntryView.py:282 msgid "Downloading %(progress)d%% of %(size)s..." msgstr "A transferir %(progress)d%% de %(size)s..." #: EntryView.py:286 msgid "Downloading %s..." msgstr "A transferir %s..." #: EntryView.py:313 msgid "There was an error downloading the file." msgstr "Ocorreu um erro ao efectuar o download do ficheiro." #: EntryView.py:324 msgid "Full Entry..." msgstr "Entrada Completa..." #: FeedList.py:20 msgid "Downloaded Media" msgstr "Media tranferidos" #: FeedList.py:20 msgid "No Feeds (Calm Mode)" msgstr "Nenhuma subscrição (Modo Calmo)" #: FeedList.py:20 FeedList.py:36 msgid "All Feeds" msgstr "Todas as subscrições" #: FeedList.py:174 msgid "Please wait..." msgstr "Por favor aguarde..." #: FeedList.py:258 #, fuzzy msgid "There was an error trying to poll this feed." msgstr "Ocorreu um erro ao verificar esta subscrição" #: HTTPDownloader.py:66 HTTPDownloader.py:67 msgid "finished downloading %s" msgstr "terminou o download de %s" #: penguintv.py:164 msgid "Polling feeds for the first time..." 
msgstr "A verificar subscrições pela primeira vez..." #: penguintv.py:439 msgid "Feeds Updated" msgstr "Subscrições Actualizadas" #: penguintv.py:666 #, fuzzy msgid "Trying to poll feed..." msgstr "A tentar verificar as subscrições..." #: penguintv.py:680 msgid "Error adding feed" msgstr "Erro ao adicionar a subscrição" #: penguintv.py:705 msgid "Feed Added" msgstr "Subscrição Adicionada" #: penguintv.py:761 msgid "" "expected at least half of reported size %(reported_size)s but the file is %" "(actual_size)s bytes." msgstr "" "esperava pelo menos metade do tamanho %(reported_size)s anunciado mas o ficheiro tem %" "(actual_size)s bytes." #: penguintv.py:875 #, fuzzy msgid "No Unviewed Media" msgstr "Não há Media Inéditos" #: penguintv.py:876 msgid "There is no unviewed media to download." msgstr "Não há media não vistos para executar o download." #: penguintv.py:886 #, fuzzy msgid "Large Download" msgstr "Download Grande" #: penguintv.py:887 msgid "" "If PenguinTV downloads all of the unviewed media, \n" "it will take %(space)s. Do you wish to continue?" msgstr "Se o PenguinTV executar o download de todos os media inéditos, \n" "ocupará %(space)s. Deseja continuar?" #: penguintv.py:930 penguintv.py:1059 msgid "Select OPML..." msgstr "Selecionar OPML..." #: penguintv.py:950 msgid "Exporting Feeds..." msgstr "Exportando Subscrições..." #: penguintv.py:965 msgid "Really Delete Feed?" msgstr "Apagar Realmente a Subscrição?" #: penguintv.py:966 msgid "Are you sure you want to delete this feed?" msgstr "Você tem certeza que deseja apagar esta subscrição?" #: penguintv.py:1015 msgid "Re_name" msgstr "Re_nomear" #: penguintv.py:1019 msgid "Edit _Tags" msgstr "Editar E_tiquetas" #: penguintv.py:1027 msgid "Mark as _Viewed" msgstr "Marcar como _Visto" #: penguintv.py:1077 msgid "Importing Feeds, please wait..." msgstr "A importar subscrições, por favor aguarde..." #: utils.py:35 msgid "complete!" msgstr "completo!" 
#: utils.py:160 msgid "Play" msgstr "Reproduzir" #: utils.py:161 msgid "Download" msgstr "Download" #: utils.py:162 msgid "Download And Play" msgstr "Executar Download e Reproduzir" #: utils.py:163 msgid "Pause" msgstr "Pausar" #: utils.py:164 utils.py:169 msgid "Stop" msgstr "Parar" #: utils.py:165 msgid "Open File" msgstr "Abrir Ficheiro" #: utils.py:167 msgid "Resume" msgstr "Continuar" #: utils.py:168 msgid "Cancel" msgstr "Cancelar" #: utils.py:178 msgid "Re-Download" msgstr "Executar o Download Novamente" #: utils.py:181 msgid "Retry" msgstr "Tentar Novamente" #: ../share/penguintv.glade.h:2 msgid " megabytes" msgstr "megabytes" #: ../share/penguintv.glade.h:3 msgid " minutes" msgstr "minutos" #: ../share/penguintv.glade.h:4 msgid "(c)2005 Owen Williams" msgstr "(c)2005 Owen Williams" #: ../share/penguintv.glade.h:5 msgid "0 bytes" msgstr "0 bytes" #: ../share/penguintv.glade.h:6 msgid "Add Feed" msgstr "Adicionar Subscrição" #: ../share/penguintv.glade.h:7 msgid "BitTorrent Options" msgstr "Opções para BitTorrent" #: ../share/penguintv.glade.h:8 msgid "Edit Tags" msgstr "Editar Etiquetas" #: ../share/penguintv.glade.h:9 msgid "General Options" msgstr "Opções Gerais" #: ../share/penguintv.glade.h:10 msgid "Refresh Mode" msgstr "Modo de Actualização" #: ../share/penguintv.glade.h:11 msgid "Rename Feed" msgstr "Renomear Subscrição" #: ../share/penguintv.glade.h:12 msgid "Add Feed" msgstr "Adicionar Subscrição" #: ../share/penguintv.glade.h:13 msgid "" "All Feeds\n" "Downloaded Media\n" "No Feeds (Calm Mode)" msgstr "" "Todas as Subscrições\n" "Media Transferidos\n" "Nenhuma Subscrição (Modo Calmo)" #: ../share/penguintv.glade.h:16 msgid "Automatically download new media" msgstr "Executar automaticamente o download de novos media" #: ../share/penguintv.glade.h:17 msgid "Automatically resume downloads on startup" msgstr "Resumir automaticamente os downloads ao iniciar" #: ../share/penguintv.glade.h:18 msgid "Autotune refresh period" msgstr "Período de actualização automática de sintonia" #: ../share/penguintv.glade.h:19 msgid "Download Unviewed Media" msgstr "Executar o download dos Media Não Vistos" #: ../share/penguintv.glade.h:20 msgid "Download _Unviewed Media" msgstr "Executar o download dos Media Não Vistos" #: ../share/penguintv.glade.h:21 msgid "E_ntry" msgstr "E_ntrada" #: ../share/penguintv.glade.h:22 msgid "Edit Tags" msgstr "Editar Etiquetas" #: ../share/penguintv.glade.h:23 msgid "Edit _Tags..." 
msgstr "Editar E_tiquetas" #: ../share/penguintv.glade.h:24 msgid "Fee_d" msgstr "_Subscrição" #: ../share/penguintv.glade.h:25 msgid "Hide Viewed" msgstr "Esconder Vistos" #: ../share/penguintv.glade.h:26 msgid "Limit total disk usage to " msgstr "Limitar uso total do disco a " #: ../share/penguintv.glade.h:27 msgid "Mark As _Unviewed" msgstr "Marcar Como _Não Visto" #: ../share/penguintv.glade.h:28 msgid "Mark As _Viewed" msgstr "Marcar Como _Visto" #: ../share/penguintv.glade.h:29 msgid "Maximum port" msgstr "Porto máximo" #: ../share/penguintv.glade.h:30 msgid "Minimum port" msgstr "Porto mínimo" #: ../share/penguintv.glade.h:32 msgid "None yet" msgstr "Nenhum ainda" #: ../share/penguintv.glade.h:33 msgid "PenguinTV" msgstr "PenguinTV" #: ../share/penguintv.glade.h:34 msgid "PenguinTV Sourceforge Site" msgstr "Site do PenguinTV no Sourceforge" #: ../share/penguintv.glade.h:35 msgid "Play Media With" msgstr "Reproduzir Media Com" #: ../share/penguintv.glade.h:36 msgid "Play Unviewed Media" msgstr "Reproduzir os Media Não Vistos" #: ../share/penguintv.glade.h:37 msgid "Please enter a new name for this feed:" msgstr "Por favor introduza um novo nome para esta subscrição:" #: ../share/penguintv.glade.h:38 msgid "" "Please enter tags you would like to apply to this feed, separated by commas. " msgstr "" "Por favor introduza as etiquetas que você gostaria de aplicar a esta subscrição, " "separadas por vírgulas." #: ../share/penguintv.glade.h:39 msgid "Please enter the URL of the feed you would like to add:" msgstr "Por favor introduza o URL da subscrição que você deseja adicionar:" #: ../share/penguintv.glade.h:40 msgid "Podcast and Video Blog aggregator for GTK+ and GNOME" msgstr "Podcast e agregador de VídeoBlog para GTK+ e GNOME" #: ../share/penguintv.glade.h:41 msgid "Preferences" msgstr "Preferências" #: ../share/penguintv.glade.h:42 msgid "Re_fresh" msgstr "_Actualizar" #: ../share/penguintv.glade.h:43 msgid "Re_name..." msgstr "Re_nomear" #: ../share/penguintv.glade.h:44 msgid "Refresh Feeds" msgstr "Actualizar Subscrições" #: ../share/penguintv.glade.h:45 msgid "Refresh every " msgstr "Actualizar a cada " #: ../share/penguintv.glade.h:46 msgid "Remove Feed" msgstr "Remover Subscrição" #: ../share/penguintv.glade.h:47 msgid "Rename Feed" msgstr "Renomear Subscrição" #: ../share/penguintv.glade.h:48 msgid "Resume _All" msgstr "Continuar _Todos" #: ../share/penguintv.glade.h:49 msgid "Tag Editor" msgstr "Editor de Etiquetas" #: ../share/penguintv.glade.h:50 msgid "Tag Highlighter:" msgstr "Destacar Etiquetas:" #: ../share/penguintv.glade.h:51 msgid "Tags:" msgstr "Etiquetas:" #: ../share/penguintv.glade.h:52 msgid "Tags: " msgstr "Etiquetas:" #: ../share/penguintv.glade.h:53 msgid "URL: " msgstr "URL:" #: ../share/penguintv.glade.h:54 msgid "Upload rate limit" msgstr "Limite da taxa de upload" #: ../share/penguintv.glade.h:55 msgid "Using: " msgstr "A usar: " #: ../share/penguintv.glade.h:56 msgid "_Add Feed..." msgstr "_Adicionar Subscrição..." #: ../share/penguintv.glade.h:57 msgid "_Delete All Media" msgstr "A_pagar Todos os Media" #: ../share/penguintv.glade.h:58 msgid "_Delete Media" msgstr "A_pagar Media" #: ../share/penguintv.glade.h:59 msgid "_Download Media" msgstr "_Transferir Media" #: ../share/penguintv.glade.h:60 msgid "_Export OPML..." msgstr "_Exportar OPML..." #: ../share/penguintv.glade.h:61 msgid "_Go" msgstr "I_r" #: ../share/penguintv.glade.h:62 msgid "_Horizontal Layout" msgstr "Disposição _Horizontal" #: ../share/penguintv.glade.h:63 msgid "_Import OPML..." 
msgstr "_Importar OPML..." #: ../share/penguintv.glade.h:64 msgid "_Mark As Viewed" msgstr "_Marcar Como Visto" #: ../share/penguintv.glade.h:65 msgid "_Play Media" msgstr "Re_produzir Media" #: ../share/penguintv.glade.h:66 msgid "_Play Unviewed" msgstr "Re_produzir Não Vistos" #: ../share/penguintv.glade.h:67 msgid "_Refresh Feeds" msgstr "A_ctualizar Subscrições" #: ../share/penguintv.glade.h:68 msgid "_Remove Feed" msgstr "_Remover Subscrição" #: ../share/penguintv.glade.h:69 msgid "_Show Today's Downloads..." msgstr "Mo_strar os Downloads do Dia..." #: ../share/penguintv.glade.h:70 msgid "_Standard Layout" msgstr "Di_sposição Padrão" #: ../share/penguintv.glade.h:71 msgid "_Vertical Layout" msgstr "Disposição _Vertical" #: ../share/penguintv.glade.h:72 msgid "_View" msgstr "_Visualização" #~ msgid "_Refresh" #~ msgstr "A_ctualizar" PenguinTV-4.2.0/po/make_pot.sh0000755000000000000000000000067310646750274013057 0ustar cd .. cat MANIFEST MANIFEST-OLPC | sort -u | grep -v feedparser |grep \\\.py | sed -e 's/$/ /' | tr -d '\n' | xargs xgettext --copyright-holder="Owen Williams" --msgid-bugs-address="owen-bugs@ywwg.com" mv messages.po ./po/penguintv.pot cd ./po intltool-extract --type "gettext/glade" ../share/penguintv.glade xgettext -k_ -kN_ -o messages2.pot ../share/penguintv.glade.h cat messages2.pot >> penguintv.pot rm messages2.pot gedit penguintv.pot PenguinTV-4.2.0/po/pt_BR.po0000644000000000000000000002511210646750274012262 0ustar # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR ORGANIZATION # FIRST AUTHOR , YEAR. # msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "POT-Creation-Date: 2006-02-25 18:31+EST\n" "PO-Revision-Date: 2006-05-10 23:13-0500\n" "Last-Translator: Og Maciel \n" "Language-Team: Brazilian Portuguese \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=utf-8\n" "Content-Transfer-Encoding: 8bit\n" "Generated-By: pygettext.py 1.5\n" #: BTDownloader.py:67 #: BTDownloader.py:68 msgid "There was an error downloading the torrent" msgstr "Houve um erro ao abaixar o torrent" #: BTDownloader.py:105 msgid "Downloaded %(progress)s%% of %(size)s, %(time)s remaining." msgstr "Abaixou %(progress)s%% de %(size)s, %(time)s restante." #: BTDownloader.py:107 msgid "Downloaded %(progress)s%% of %(size)s" msgstr "Abaixou %(progress)s%% de %(size)s" #: EntryList.py:172 msgid "_Download" msgstr "Abaixar" #: EntryList.py:182 #: utils.py:166 msgid "Delete" msgstr "Apagar" #: EntryList.py:186 msgid "Mark as _Unviewed" msgstr "Marcar Como Inédito" #: EntryView.py:276 #: EntryView.py:282 msgid "Downloading %(progress)d%% of %(size)s..." msgstr "Abaixando %(progress)d%% de %(size)s..." #: EntryView.py:286 msgid "Downloading %s..." msgstr "Abaixando %s..." #: EntryView.py:313 msgid "There was an error downloading the file." msgstr "Houve um erro abaixando o arquivo." #: EntryView.py:324 msgid "Full Entry..." msgstr "Entrada Completa..." #: FeedList.py:20 msgid "Downloaded Media" msgstr "Abaixou Mídia" #: FeedList.py:20 msgid "No Feeds (Calm Mode)" msgstr "Nenhum Canal (Modo Calmo)" #: FeedList.py:20 #: FeedList.py:36 msgid "All Feeds" msgstr "Todos Canais" #: FeedList.py:174 msgid "Please wait..." msgstr "Por favor esperar..." #: FeedList.py:258 msgid "There was an error trying to poll this feed." msgstr "Houve um erro adicionando este canal" #: HTTPDownloader.py:66 #: HTTPDownloader.py:67 msgid "finished downloading %s" msgstr "terminou de abaixar %s" #: penguintv.py:164 msgid "Polling feeds for the first time..." msgstr "Puxando canais pela primeira vez..." 
#: penguintv.py:439 msgid "Feeds Updated" msgstr "Canais Atualizados" #: penguintv.py:666 msgid "Trying to poll feed..." msgstr "Tentando puxar canal..." #: penguintv.py:680 msgid "Error adding feed" msgstr "Erro ao adicionar canal" #: penguintv.py:705 msgid "Feed Added" msgstr "Canal Adicionado" #: penguintv.py:761 msgid "expected at least half of reported size %(reported_size)s but the file is %(actual_size)s bytes." msgstr "experado pelo menos metade do tamanho %(reported_size)s anunciado mas o arquivo é %(actual_size)s bytes." #: penguintv.py:875 msgid "No Unviewed Media" msgstr "Nenhuma Mídia Inédita" #: penguintv.py:876 msgid "There is no unviewed media to download." msgstr "Não há nenhuma mídia inédita para abaixar." #: penguintv.py:886 msgid "Large Download" msgstr "Download Grande" #: penguintv.py:887 msgid "" "If PenguinTV downloads all of the unviewed media, \n" "it will take %(space)s. Do you wish to continue?" msgstr "" "Se PenguinTV abaixar toda mídia inédita, \n" "ocupará %(space)s. Você deseja continuar?" #: penguintv.py:930 #: penguintv.py:1059 msgid "Select OPML..." msgstr "Selecionar OPML..." #: penguintv.py:950 msgid "Exporting Feeds..." msgstr "Exportando Canais..." #: penguintv.py:965 msgid "Really Delete Feed?" msgstr "Realmente Apagar Canal?" #: penguintv.py:966 msgid "Are you sure you want to delete this feed?" msgstr "Você tem certeza que deseja apagar este canal?" #: penguintv.py:1015 msgid "Re_name" msgstr "Re_nomear" #: penguintv.py:1019 msgid "Edit _Tags" msgstr "Editar E_tiquetas" #: penguintv.py:1027 msgid "Mark as _Viewed" msgstr "Marcar como _Visto" #: penguintv.py:1077 msgid "Importing Feeds, please wait..." msgstr "Importando Canais, favor esperar..." #: utils.py:35 msgid "complete!" msgstr "completo!" #: utils.py:160 msgid "Play" msgstr "Reproduzir" #: utils.py:161 msgid "Download" msgstr "Abaixar" #: utils.py:162 msgid "Download And Play" msgstr "Abaixar e Reproduzir" #: utils.py:163 msgid "Pause" msgstr "Pausar" #: utils.py:164 #: utils.py:169 msgid "Stop" msgstr "Parar" #: utils.py:165 msgid "Open File" msgstr "Abrir Arquivo" #: utils.py:167 msgid "Resume" msgstr "Resumir" #: utils.py:168 msgid "Cancel" msgstr "Cancelar" #: utils.py:178 msgid "Re-Download" msgstr "Abaixar Novamente" #: utils.py:181 msgid "Retry" msgstr "Tentar Novamente" #: ../share/penguintv.glade.h:2 msgid " megabytes" msgstr "megabytes" #: ../share/penguintv.glade.h:3 msgid " minutes" msgstr "minutos" #: ../share/penguintv.glade.h:4 msgid "(c)2005 Owen Williams" msgstr "(c)2005 Owen Williams" #: ../share/penguintv.glade.h:5 msgid "0 bytes" msgstr "0 bytes" #: ../share/penguintv.glade.h:6 msgid "Add Feed" msgstr "Adicionar Canal" #: ../share/penguintv.glade.h:7 msgid "BitTorrent Options" msgstr "Opções para BitTorrent" #: ../share/penguintv.glade.h:8 msgid "Edit Tags" msgstr "Editar Etiquetas" #: ../share/penguintv.glade.h:9 msgid "General Options" msgstr "Opções Gerais" #: ../share/penguintv.glade.h:10 msgid "Refresh Mode" msgstr "Modo de Recarregar" #: ../share/penguintv.glade.h:11 msgid "Rename Feed" msgstr "Renomear Canal" #: ../share/penguintv.glade.h:12 msgid "Add Feed" msgstr "Adicionar Canal" #: ../share/penguintv.glade.h:13 msgid "" "All Feeds\n" "Downloaded Media\n" "No Feeds (Calm Mode)" msgstr "" "Todos Canais\n" "Mídia abaixada\n" "Nenhum Canal (Modo Calmo)" #: ../share/penguintv.glade.h:16 msgid "Automatically download new media" msgstr "Abaixar automaticamente nova mídia" #: ../share/penguintv.glade.h:17 msgid "Automatically resume downloads on startup" msgstr 
"Resumir automaticamente downloads ao iniciar" #: ../share/penguintv.glade.h:18 msgid "Autotune refresh period" msgstr "Período de atualização automática de sintonia" #: ../share/penguintv.glade.h:19 msgid "Download Unviewed Media" msgstr "Abaixar Mídia Inédita" #: ../share/penguintv.glade.h:20 msgid "Download _Unviewed Media" msgstr "Abaixar Mídia Inédita" #: ../share/penguintv.glade.h:21 msgid "E_ntry" msgstr "E_ntrada" #: ../share/penguintv.glade.h:22 msgid "Edit Tags" msgstr "Editar Etiquetas" #: ../share/penguintv.glade.h:23 msgid "Edit _Tags..." msgstr "Editar E_tiquetas" #: ../share/penguintv.glade.h:24 msgid "Fee_d" msgstr "Canal" #: ../share/penguintv.glade.h:25 msgid "Hide Viewed" msgstr "Esconder Não Inédito" #: ../share/penguintv.glade.h:26 msgid "Limit total disk usage to " msgstr "Limitar uso total do disco para " #: ../share/penguintv.glade.h:27 msgid "Mark As _Unviewed" msgstr "Marcar Como Inédito" #: ../share/penguintv.glade.h:28 msgid "Mark As _Viewed" msgstr "Marcar Como Visto" #: ../share/penguintv.glade.h:29 msgid "Maximum port" msgstr "Porta máxima" #: ../share/penguintv.glade.h:30 msgid "Minimum port" msgstr "Porta mínima" #: ../share/penguintv.glade.h:32 msgid "None yet" msgstr "Nenhum ainda" #: ../share/penguintv.glade.h:33 msgid "PenguinTV" msgstr "PenguinTV" #: ../share/penguintv.glade.h:34 msgid "PenguinTV Sourceforge Site" msgstr "Site no Sourceforge do PenguinTV" #: ../share/penguintv.glade.h:35 msgid "Play Media With" msgstr "Reproduzir Mídia Com" #: ../share/penguintv.glade.h:36 msgid "Play Unviewed Media" msgstr "Reproduzir Mídia Inédita" #: ../share/penguintv.glade.h:37 msgid "Please enter a new name for this feed:" msgstr "Por favor entre um novo nome para este canal:" #: ../share/penguintv.glade.h:38 msgid "Please enter tags you would like to apply to this feed, separated by commas. " msgstr "Por favor entre as etiquetas que você gostaria de aplicar à este canal, separadas por vírgulas." #: ../share/penguintv.glade.h:39 msgid "Please enter the URL of the feed you would like to add:" msgstr "Por favor entre o URL do canal que você deseja adicionar:" #: ../share/penguintv.glade.h:40 msgid "Podcast and Video Blog aggregator for GTK+ and GNOME" msgstr "Podcast e agregador de VídeoBlog para GTK+ e GNOME" #: ../share/penguintv.glade.h:41 msgid "Preferences" msgstr "Preferências" #: ../share/penguintv.glade.h:42 msgid "Re_fresh" msgstr "Recarregar" #: ../share/penguintv.glade.h:43 msgid "Re_name..." msgstr "Re_nomear" #: ../share/penguintv.glade.h:44 msgid "Refresh Feeds" msgstr "Recarregar Canais" #: ../share/penguintv.glade.h:45 msgid "Refresh every " msgstr "Recarregar a cada " #: ../share/penguintv.glade.h:46 msgid "Remove Feed" msgstr "Remover Canal" #: ../share/penguintv.glade.h:47 msgid "Rename Feed" msgstr "Renomear Canal" #: ../share/penguintv.glade.h:48 msgid "Resume _All" msgstr "Resumir Todos" #: ../share/penguintv.glade.h:49 msgid "Tag Editor" msgstr "Editor de Etiquetas" #: ../share/penguintv.glade.h:50 msgid "Tag Highlighter:" msgstr "Destacador de Etiquetas:" #: ../share/penguintv.glade.h:51 msgid "Tags:" msgstr "Etiquetas:" #: ../share/penguintv.glade.h:52 msgid "Tags: " msgstr "Etiquetas:" #: ../share/penguintv.glade.h:53 msgid "URL: " msgstr "URL:" #: ../share/penguintv.glade.h:54 msgid "Upload rate limit" msgstr "Limite da taxa de upload" #: ../share/penguintv.glade.h:55 msgid "Using: " msgstr "Usando: " #: ../share/penguintv.glade.h:56 msgid "_Add Feed..." msgstr "_Adicionar Canal..." 
#: ../share/penguintv.glade.h:57 msgid "_Delete All Media" msgstr "Apagar Toda Mídia" #: ../share/penguintv.glade.h:58 msgid "_Delete Media" msgstr "Apagar Mídia" #: ../share/penguintv.glade.h:59 msgid "_Download Media" msgstr "Abaixar Mídia" #: ../share/penguintv.glade.h:60 msgid "_Export OPML..." msgstr "_Exportar OPML..." #: ../share/penguintv.glade.h:61 msgid "_Go" msgstr "Ir" #: ../share/penguintv.glade.h:62 msgid "_Horizontal Layout" msgstr "Disposição _Horizontal" #: ../share/penguintv.glade.h:63 msgid "_Import OPML..." msgstr "_Importar OPML..." #: ../share/penguintv.glade.h:64 msgid "_Mark As Viewed" msgstr "_Marcar Como Visto" #: ../share/penguintv.glade.h:65 msgid "_Play Media" msgstr "Re_produzir Mídia" #: ../share/penguintv.glade.h:66 msgid "_Play Unviewed" msgstr "Re_produzir Inédito" #: ../share/penguintv.glade.h:67 msgid "_Refresh Feeds" msgstr "_Recarregar Canais" #: ../share/penguintv.glade.h:68 msgid "_Remove Feed" msgstr "_Remover Canal" #: ../share/penguintv.glade.h:69 msgid "_Show Today's Downloads..." msgstr "Mo_strar Downloads do Dia..." #: ../share/penguintv.glade.h:70 msgid "_Standard Layout" msgstr "Di_sposição Padrão" #: ../share/penguintv.glade.h:71 msgid "_Vertical Layout" msgstr "Disposição _Vertical" #: ../share/penguintv.glade.h:72 msgid "_View" msgstr "_Visualização" #~ msgid "_Refresh" #~ msgstr "_Recarregar" PenguinTV-4.2.0/po/sv.po0000644000000000000000000003161010646750274011704 0ustar # PenguinTV # Copyright (C) 2006 Owen Williams # Owen Williams , 2006. # msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "POT-Creation-Date: 2006-08-10 12:26+EDT\n" "PO-Revision-Date: 2006-08-14 12:26+CEST\n" "Last-Translator: Mikael Freeman \n" "Language-Team: LANGUAGE \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=utf-8\n" "Content-Transfer-Encoding: 8bit\n" "Generated-By: pygettext.py 1.5\n" #: AddFeedDialog.py:68 msgid "Authorization Required" msgstr "Verifiering krävs" #: AddFeedDialog.py:69 msgid "You must specify a valid username and password in order to add this feed." msgstr "Du måste fylla i ett giltigt användarnamn och lösenord för att lägga till denna feeden" #: AddFeedDialog.py:81 msgid "No Feed in Page" msgstr "Inga feeds på sidan" #: AddFeedDialog.py:82 msgid "" "PenguinTV couldn't find a feed in the web page you provided.\n" "You will need to find the RSS feed link in the web page yourself. Sorry." msgstr "PenguinTV kunde inte hitta en feed på websidan du angav.\n" "Du måste hitta länken till RSS feeden på websidan själv." #: BTDownloader.py:61 BTDownloader.py:62 msgid "There was an error downloading the torrent" msgstr "Det uppstod ett fel vid nerladdningen av torrenten" #: BTDownloader.py:104 msgid "Downloaded %(progress)s%% of %(size)s, %(time)s remaining." msgstr "Nerladdat %(progress)s%% av %(size)s, %(time)s återstår." #: BTDownloader.py:106 HTTPDownloader.py:109 msgid "Downloaded %(progress)s%% of %(size)s" msgstr "Nerladdat %(progress)s%% av %(size)s" #: EntryList.py:27 msgid "Articles" msgstr "Artiklar" #: EntryList.py:232 msgid "_Download" msgstr "Ladda ner" #: EntryList.py:242 utils.py:203 msgid "Delete" msgstr "Ta bort" #: EntryList.py:247 msgid "Mark as _Viewed" msgstr "Markera som _sedd" #: EntryList.py:251 msgid "Mark as _Unviewed" msgstr "Markera som _icke sedd" #: EntryView.py:318 msgid "Loading images..." msgstr "Laddar bilder..."
#: EntryView.py:431 EntryView.py:441 msgid "Downloaded %(progress)d%% of %(size)s" msgstr "Nerladdat %(progress)d%% av %(size)s" #: EntryView.py:435 msgid "Download queued" msgstr "Nerladdning i kö" #: EntryView.py:445 msgid "Downloading %s..." msgstr "Laddar ner %s..." #: EntryView.py:473 msgid "There was an error downloading the file." msgstr "Det blev ett fel vid nerladdning av filen" #: EntryView.py:484 msgid "Full Entry..." msgstr "Hela Listan..." #: FeedList.py:17 msgid "Active Downloads" msgstr "Aktiva Nerladdningar" #: FeedList.py:17 msgid "Downloaded Media" msgstr "Nerladdad Media" #: FeedList.py:17 msgid "No Feeds (Calm Mode)" msgstr "Inga Feeds" #: FeedList.py:17 FeedList.py:44 msgid "All Feeds" msgstr "Alla Feeds" #: FeedList.py:53 msgid "Feeds" msgstr "Feeds" #: FeedList.py:187 msgid "Loading Feeds..." msgstr "Laddar Feeds..." #: FeedList.py:193 msgid "Reloading Feeds..." msgstr "Laddar om Feeds..." #: FeedList.py:314 msgid "Please wait..." msgstr "Var vänlig vänta..." #: FeedList.py:467 msgid "There was an error trying to poll this feed." msgstr "Det uppstod ett fel vid granskning av denna feeden" #: HTTPDownloader.py:66 msgid "404: File Not Found" msgstr "404: Filen Hittades Inte" #: HTTPDownloader.py:69 msgid "Some HTTP error: %(response)s" msgstr "HTTP fel: %(response)s" #: HTTPDownloader.py:74 HTTPDownloader.py:76 msgid "finished downloading %s" msgstr "slutförde nerladdning av %s" #: penguintv.py:175 msgid "Polling feeds for the first time..." msgstr "Granskar feeds för första gången... " #: penguintv.py:348 msgid "Polling Feeds..." msgstr "Granskar feeds..." #: penguintv.py:351 penguintv.py:1109 msgid "Feeds Updated" msgstr "Feeds Uppdaterade" #: penguintv.py:525 msgid "No Unviewed Media" msgstr "Ingen Icke Visad Media" #: penguintv.py:526 msgid "There is no unviewed media to download." msgstr "Det finns ingen icke visad media att ladda ner" #: penguintv.py:536 msgid "Large Download" msgstr "Stor nerladdning" #: penguintv.py:537 msgid "" "If PenguinTV downloads all of the unviewed media, \n" "it will take %(space)s. Do you wish to continue?" msgstr "Om PenguinTV laddar ner all icke sedd media, \n" "kommer det att ta %(space)s. Vill du fortsätta?" #: penguintv.py:557 msgid "Select OPML..." msgstr "Välj OPML..." #: penguintv.py:577 msgid "Exporting Feeds..." msgstr "Exporterar Feeds..." #: penguintv.py:588 msgid "Really Delete Feed?" msgstr "Vill Du Verkligen Ta Bort Feeden?" #: penguintv.py:589 msgid "Are you sure you want to delete this feed?" msgstr "Är du säker du vill ta bort den här feeden?" #: penguintv.py:626 msgid "Importing OPML file" msgstr "Importerar OPML-fil" #: penguintv.py:627 msgid "Loading the feeds from the OPML file" msgstr "Laddar feeds från OPML-filen" #: penguintv.py:674 msgid "Finishing OPML import" msgstr "Avslutar OPML importering" #: penguintv.py:737 msgid "Polling Feed..." msgstr "Granskar Feed..." #: penguintv.py:741 msgid "Feed Updated" msgstr "Feed Uppdaterad" #: penguintv.py:824 msgid "Trying to poll feed..." msgstr "Försöker att granska feed..." #: penguintv.py:863 msgid "Error adding feed" msgstr "Fel vid tilläggande av feed" #: penguintv.py:869 msgid "Feed Added" msgstr "Feed tillagd" #: penguintv.py:963 msgid "WARNING: Expected %(reported_size)s bytes but the file is %(actual_size)s bytes." msgstr "VARNING: Förväntade sig %(reported_size)s bytes men filen är %(actual_size)s bytes." #: utils.py:43 msgid "complete!" msgstr "färdig!"
#: utils.py:197 msgid "Play" msgstr "Spela" #: utils.py:198 msgid "Download" msgstr "Ladda ner" #: utils.py:199 msgid "Download And Play" msgstr "Ladda Ner Och Spela" #: utils.py:200 msgid "Pause" msgstr "Pause" #: utils.py:201 utils.py:205 msgid "Cancel" msgstr "Avbryt" #: utils.py:202 msgid "Open File" msgstr "Öppna Fil" #: utils.py:204 msgid "Resume" msgstr "Forsätt" #: utils.py:206 msgid "Stop" msgstr "Stopp" #: utils.py:215 msgid "Re-Download" msgstr "Ladda Ner Igen" #: utils.py:218 msgid "Retry" msgstr "Försök Igen" #: ../share/penguintv.glade.h:2 msgid " megabytes" msgstr "megabytes" #: ../share/penguintv.glade.h:3 msgid " minutes" msgstr "minuter" #: ../share/penguintv.glade.h:4 msgid "(c)2005 Owen Williams" msgstr "(c)2005 Owen Williams" #: ../share/penguintv.glade.h:5 msgid "0 bytes" msgstr "0 bytes" #: ../share/penguintv.glade.h:6 msgid "Add Feed" msgstr "Lägg till Feed" #: ../share/penguintv.glade.h:7 msgid "BitTorrent Options" msgstr "BitTorrent Val" #: ../share/penguintv.glade.h:8 msgid "Edit Tags" msgstr "Editera Tagar" #: ../share/penguintv.glade.h:9 msgid "General Options" msgstr "Allmäna Val" #: ../share/penguintv.glade.h:10 msgid "Login Required" msgstr "Inloggning Krävs" #: ../share/penguintv.glade.h:11 msgid "Refresh Mode" msgstr "Omladdningsläge" #: ../share/penguintv.glade.h:12 msgid "Rename Feed" msgstr "Byt Namn På Feed" #: ../share/penguintv.glade.h:13 msgid "A username and password are required for this feed:" msgstr "Ett användarnamn och lösenord krävs för denna feeden:" #: ../share/penguintv.glade.h:14 msgid "Add Feed" msgstr "Lägg Till Feed" #: ../share/penguintv.glade.h:15 msgid "" "All Feeds\n" "Downloaded Media\n" "No Feeds (Calm Mode)" msgstr "Alla Feeds\n" "Nerladdad Media\n" "Inga Feeds (Calm Mode)" #: ../share/penguintv.glade.h:18 msgid "Automatically download new media" msgstr "Ladda ner automatiskt ny media" #: ../share/penguintv.glade.h:19 msgid "Autotune refresh period" msgstr "Automatisera omladdningsperiod" #: ../share/penguintv.glade.h:20 msgid "Changing Layouts..." msgstr "Byt Layout..." #: ../share/penguintv.glade.h:21 msgid "Download Unviewed Media" msgstr "Ladda Ner Icke Seed Media" #: ../share/penguintv.glade.h:22 msgid "Download _Unviewed Media" msgstr "Ladda Ner _Icke sedd Media" #: ../share/penguintv.glade.h:23 msgid "E_ntry" msgstr "L_ista" #: ../share/penguintv.glade.h:24 msgid "Edit Tags" msgstr "Editera Tagar" #: ../share/penguintv.glade.h:25 msgid "Edit _Tags..." msgstr "Editera _Tagar..." 
#: ../share/penguintv.glade.h:26 msgid "Fee_d" msgstr "Fee_d" #: ../share/penguintv.glade.h:27 msgid "Hide Viewed" msgstr "Göm sedda" #: ../share/penguintv.glade.h:28 msgid "Limit total disk usage to " msgstr "Begränsa diskutrymme till" #: ../share/penguintv.glade.h:29 msgid "Login Required" msgstr "Inloggning Krävs" #: ../share/penguintv.glade.h:30 msgid "Mark As _Unviewed" msgstr "Markera Som _Icke Sedd" #: ../share/penguintv.glade.h:31 msgid "Mark As _Viewed" msgstr "Markera Som _visad" #: ../share/penguintv.glade.h:32 msgid "Maximum port" msgstr "Sista porten" #: ../share/penguintv.glade.h:33 msgid "Minimum port" msgstr "Första porten" #: ../share/penguintv.glade.h:35 msgid "None yet" msgstr "Ingen ännu" #: ../share/penguintv.glade.h:36 msgid "Password: " msgstr "Lösenord: " #: ../share/penguintv.glade.h:37 msgid "PenguinTV" msgstr "PenguinTV" #: ../share/penguintv.glade.h:38 msgid "PenguinTV Sourceforge Site" msgstr "PenguinTV Sourceforge Sida" #: ../share/penguintv.glade.h:39 msgid "Play Unviewed Media" msgstr "Spela Icke Sedd Media" #: ../share/penguintv.glade.h:40 msgid "Please enter a new name for this feed:" msgstr "Var vänlig ange ett nytt namn för denna feeden:" #: ../share/penguintv.glade.h:41 msgid "" "Please enter tags you would like to apply to this feed, separated by commas. " msgstr "Var vänlig att skriva in tagar du vill lägga till feeden, separerade av kommatecken. " #: ../share/penguintv.glade.h:42 msgid "" "Please enter the URL of the feed you would like to add; and any tags you " "would like to apply to this feed, separated by commas:" msgstr "Var vänlig att fylla i url:en på feeden du vill lägga till; och tagar du " "vill lägga till denna feeden, separerade av kommatecken:" #: ../share/penguintv.glade.h:43 msgid "Please wait while PenguinTV changes layouts..." msgstr "Var vänlig vänta medan PenguinTV byter layout..." #: ../share/penguintv.glade.h:44 msgid "Podcast and Video Blog aggregator for GTK+ and GNOME" msgstr "Podcast och Video Blog aggregator för GTK+ och GNOME" #: ../share/penguintv.glade.h:45 msgid "Poll all feeds on startup" msgstr "Granska alla feeds vid uppstart" #: ../share/penguintv.glade.h:46 msgid "Preferences" msgstr "Inställningar" #: ../share/penguintv.glade.h:47 msgid "Re_fresh" msgstr "Ladda_Om" #: ../share/penguintv.glade.h:48 msgid "Re_name..." msgstr "Byt_namn..."
#: ../share/penguintv.glade.h:49 msgid "Refresh Feeds" msgstr "Ladda Om Feeds" #: ../share/penguintv.glade.h:50 msgid "Refresh every " msgstr "Ladda om varje " #: ../share/penguintv.glade.h:51 msgid "Remove Feed" msgstr "Ta Bort Feed" #: ../share/penguintv.glade.h:52 msgid "Rename Feed" msgstr "Byt Namn På Feed" #: ../share/penguintv.glade.h:53 msgid "Resume _All" msgstr "Fortsätt _Alla" #: ../share/penguintv.glade.h:54 msgid "Resume downloads on startup" msgstr "Fortsätt nerladdningar vid uppstart" #: ../share/penguintv.glade.h:55 msgid "Stop All Active Downloads" msgstr "Stoppa Alla Aktiva Nerladdningar" #: ../share/penguintv.glade.h:56 msgid "Tag Editor" msgstr "Tag Editor" #: ../share/penguintv.glade.h:57 msgid "Tag Highlighter:" msgstr "Framhäv Tag:" #: ../share/penguintv.glade.h:58 msgid "Tags:" msgstr "Tagar:" #: ../share/penguintv.glade.h:59 msgid "Tags: " msgstr "Tagar: " #: ../share/penguintv.glade.h:60 msgid "URL: " msgstr "URL: " #: ../share/penguintv.glade.h:61 msgid "Upload rate limit" msgstr "Uppladdningsgräns" #: ../share/penguintv.glade.h:62 msgid "User Name: " msgstr "Användarnamn: " #: ../share/penguintv.glade.h:63 msgid "Using: " msgstr "Använder: " #: ../share/penguintv.glade.h:64 msgid "_Add Feed..." msgstr "_Lägg Till Feed..." #: ../share/penguintv.glade.h:65 msgid "_Delete All Media" msgstr "_Ta Bort All Media" #: ../share/penguintv.glade.h:66 msgid "_Delete Media" msgstr "_Ta Bort Media" #: ../share/penguintv.glade.h:67 msgid "_Download Media" msgstr "_Ladda Ner Media" #: ../share/penguintv.glade.h:68 msgid "_Export OPML..." msgstr "_Exportera OPML..." #: ../share/penguintv.glade.h:69 msgid "_Go" msgstr "_Kör" #: ../share/penguintv.glade.h:70 msgid "_Horizontal Layout" msgstr "Horisontell Layout" #: ../share/penguintv.glade.h:71 msgid "_Import OPML..." msgstr "_Importera OPML..." #: ../share/penguintv.glade.h:72 msgid "_Mark As Viewed" msgstr "_Markera som sedd" #: ../share/penguintv.glade.h:73 msgid "_Play Media" msgstr "Spela Media" #: ../share/penguintv.glade.h:74 msgid "_Play Unviewed" msgstr "_Spela icke sedd" #: ../share/penguintv.glade.h:75 msgid "_Refresh Feeds" msgstr "_Ladda om Feeds" #: ../share/penguintv.glade.h:76 msgid "_Remove Feed" msgstr "_Ta bort Feed" #: ../share/penguintv.glade.h:77 msgid "_Show Today's Downloads..." msgstr "_Visa Dagens Nerladdningar..." #: ../share/penguintv.glade.h:78 msgid "_Standard Layout" msgstr "_Standard Layout" #: ../share/penguintv.glade.h:79 msgid "_Vertical Layout" msgstr "_Vertikal Layout" #: ../share/penguintv.glade.h:80 msgid "_View" msgstr "_Visa" #: ../share/penguintv.glade.h:81 msgid "a" msgstr "en" PenguinTV-4.2.0/po/install-po.sh0000755000000000000000000000022610646750274013334 0ustar locale_name=$1 mkdir -p /usr/share/locale/$locale_name/LC_MESSAGES/ msgfmt $locale_name.po -o /usr/share/locale/$locale_name/LC_MESSAGES/penguintv.mo PenguinTV-4.2.0/po/penguintv.pot0000644000000000000000000006077410646750274013464 0ustar # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR Owen Williams # This file is distributed under the same license as the PACKAGE package. # FIRST AUTHOR , YEAR.
# #: penguintv/DownloadView.py:78 penguintv/FeedList.py:124 #: penguintv/GStreamerPlayer.py:96 #, fuzzy msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: owen-bugs@ywwg.com\n" "POT-Creation-Date: 2007-07-06 09:53-0400\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=CHARSET\n" "Content-Transfer-Encoding: 8bit\n" #: PenguinTVActivity.py:48 penguintv/FeedList.py:104 msgid "Feeds" msgstr "" #: penguintv/AddFeedDialog.py:114 msgid "Authorization Required" msgstr "" #: penguintv/AddFeedDialog.py:115 msgid "" "You must specify a valid username and password in order to add this feed." msgstr "" #: penguintv/AddFeedDialog.py:129 msgid "No Feed in Page" msgstr "" #: penguintv/AddFeedDialog.py:130 msgid "" "PenguinTV couldn't find a feed in the web page you provided.\n" "You will need to find the RSS feed link in the web page yourself. Sorry." msgstr "" #: penguintv/AddFeedDialog.py:312 msgid "Choose Feed" msgstr "" #: penguintv/AddFeedDialog.py:314 msgid "Please choose one of the feeds in this page" msgstr "" #: penguintv/AddSearchTagDialog.py:49 msgid "Tag Name Already Exists" msgstr "" #: penguintv/AddSearchTagDialog.py:50 msgid "That tag name is already in use. Please choose a different name." msgstr "" #: penguintv/BTDownloader.py:52 penguintv/BTDownloader.py:54 msgid "There was an error downloading the torrent" msgstr "" #: penguintv/BTDownloader.py:91 #, python-format msgid "Downloaded %(progress)s%% of %(size)s, %(time)s remaining." msgstr "" #: penguintv/BTDownloader.py:93 penguintv/HTTPDownloader.py:138 #, python-format msgid "Downloaded %(progress)s%% of %(size)s" msgstr "" #: penguintv/BTDownloader.py:117 #, python-format msgid "Finished downloading %(filename)s" msgstr "" #: penguintv/DownloadView.py:85 msgid "Description" msgstr "" #: penguintv/DownloadView.py:92 msgid "Progress" msgstr "" #: penguintv/DownloadView.py:99 msgid "Size" msgstr "" #: penguintv/DownloadView.py:106 msgid "Status" msgstr "" #: penguintv/DownloadView.py:162 penguintv/DownloadView.py:169 #: penguintv/DownloadView.py:199 penguintv/DownloadView.py:297 #: penguintv/DownloadView.py:305 msgid "Paused" msgstr "" #: penguintv/DownloadView.py:164 penguintv/DownloadView.py:171 #: penguintv/DownloadView.py:203 penguintv/DownloadView.py:300 #: penguintv/DownloadView.py:307 msgid "Queued" msgstr "" #: penguintv/EditSearchesDialog.py:140 msgid "New Query" msgstr "" #: penguintv/EditSearchesDialog.py:141 msgid "New Tag" msgstr "" #: penguintv/EntryList.py:56 msgid "Articles" msgstr "" #: penguintv/EntryList.py:417 msgid "_Download" msgstr "" #: penguintv/EntryList.py:427 penguintv/utils.py:335 msgid "Delete" msgstr "" #: penguintv/EntryList.py:432 penguintv/MainWindow.py:796 msgid "Mark as _Viewed" msgstr "" #: penguintv/EntryList.py:436 msgid "Mark as _Unviewed" msgstr "" #: penguintv/EntryView.py:421 msgid "Loading images..." msgstr "" #: penguintv/EntryView.py:548 msgid "Full Entry..." msgstr "" #: penguintv/EntryView.py:561 penguintv/EntryView.py:585 #, python-format msgid "Downloading %s..." msgstr "" #: penguintv/EntryView.py:571 penguintv/EntryView.py:581 #, python-format msgid "Downloaded %(progress)d%% of %(size)s" msgstr "" #: penguintv/EntryView.py:575 msgid "Download queued" msgstr "" #: penguintv/EntryView.py:614 msgid "There was an error downloading the file." 
msgstr "" #: penguintv/FeedFilterPropertiesDialog.py:36 msgid "Filter Already Exists" msgstr "" #: penguintv/FeedFilterPropertiesDialog.py:37 msgid "" "A filter already exists for that feed and query. Please choose a different " "query." msgstr "" #: penguintv/FeedList.py:21 penguintv/FeedList.py:89 #: penguintv/MainWindow.py:1254 msgid "All Feeds" msgstr "" #: penguintv/FeedList.py:21 msgid "Downloaded Media" msgstr "" #: penguintv/FeedList.py:21 penguintv/MainWindow.py:1258 msgid "Notifying Feeds" msgstr "" #: penguintv/FeedList.py:21 msgid "Search Results" msgstr "" #: penguintv/FeedList.py:132 msgid "Image" msgstr "" #: penguintv/FeedList.py:780 msgid "No Homepage" msgstr "" #: penguintv/FeedList.py:781 msgid "" "There is no homepage associated with this feed. You can set one in the feed " "properties." msgstr "" #: penguintv/FeedList.py:872 penguintv/FeedList.py:913 #: penguintv/MainWindow.py:1028 penguintv/penguintv.py:291 msgid "Please wait..." msgstr "" #: penguintv/FeedList.py:997 msgid "There was an error trying to poll this feed." msgstr "" #: penguintv/FeedPropertiesDialog.py:78 msgid "Momentarily" msgstr "" #: penguintv/FeedPropertiesDialog.py:83 #, python-format msgid "in approx %(hours)sh %(mins)sm" msgstr "" #: penguintv/FeedPropertiesDialog.py:202 msgid "URL Already in Use" msgstr "" #: penguintv/FeedPropertiesDialog.py:203 msgid "A feed already exists with that URL. Please use a different URL." msgstr "" #: penguintv/FilterSelectorDialog.py:30 msgid "Favorites" msgstr "" #: penguintv/FilterSelectorDialog.py:42 penguintv/MainWindow.py:1300 msgid "All Tags" msgstr "" #: penguintv/GStreamerPlayer.py:102 msgid "Playlist" msgstr "" #: penguintv/HTTPDownloader.py:65 msgid "404: File Not Found" msgstr "" #: penguintv/HTTPDownloader.py:68 #, python-format msgid "Some HTTP error: %(response)s" msgstr "" #: penguintv/HTTPDownloader.py:81 #, python-format msgid "FTP error: %(response)s" msgstr "" #: penguintv/HTTPDownloader.py:84 #, python-format msgid "Unexpected FTP response: %(response)s" msgstr "" #: penguintv/HTTPDownloader.py:86 msgid "Unknown protocol" msgstr "" #: penguintv/HTTPDownloader.py:94 penguintv/HTTPDownloader.py:98 #, python-format msgid "finished downloading %s" msgstr "" #: penguintv/HTTPDownloader.py:136 #, python-format msgid "Downloaded %(dl_now)s..." msgstr "" #: penguintv/MainWindow.py:151 msgid "Feed Added" msgstr "" #: penguintv/MainWindow.py:154 msgid "Error adding feed" msgstr "" #: penguintv/MainWindow.py:159 msgid "Feed Updated" msgstr "" #: penguintv/MainWindow.py:301 msgid "Really delete feed?" msgstr "" #: penguintv/MainWindow.py:312 msgid "Remove Feed" msgstr "" #: penguintv/MainWindow.py:349 msgid "Add Feed" msgstr "" #: penguintv/MainWindow.py:355 msgid "Preferences" msgstr "" #: penguintv/MainWindow.py:433 msgid "Feeds" msgstr "" #: penguintv/MainWindow.py:445 msgid "Player" msgstr "" #: penguintv/MainWindow.py:449 msgid "Downloads" msgstr "" #: penguintv/MainWindow.py:673 #, python-format msgid "%(title)s Filtered" msgstr "" #: penguintv/MainWindow.py:675 msgid "No Feed Selected" msgstr "" #: penguintv/MainWindow.py:676 msgid "Please select the feed you would like to filter" msgstr "" #: penguintv/MainWindow.py:784 msgid "Lucene required for feed filters" msgstr "" #: penguintv/MainWindow.py:802 msgid "_Delete All Media" msgstr "" #: penguintv/MainWindow.py:808 msgid "_Remove Feed" msgstr "" #: penguintv/MainWindow.py:822 msgid "_Create Feed Filter" msgstr "" #: penguintv/MainWindow.py:899 penguintv/penguintv.py:902 msgid "Select OPML..." 
msgstr "" #: penguintv/MainWindow.py:918 msgid "Importing Feeds, please wait..." msgstr "" #: penguintv/MainWindow.py:993 penguintv/MainWindow.py:1000 #, python-format msgid "Player (%d)" msgstr "" #: penguintv/MainWindow.py:1040 msgid "Really Delete Feed?" msgstr "" #: penguintv/MainWindow.py:1041 msgid "Are you sure you want to delete this feed?" msgstr "" #: penguintv/MainWindow.py:1276 penguintv/MainWindow.py:1287 #, python-format msgid "tag: %s" msgstr "" #: penguintv/MainWindow.py:1321 msgid "Edit Favorite Tags..." msgstr "" #: penguintv/MainWindow.py:1437 #, python-format msgid "" "Downloaded %(percent)d%% of %(files)d file%(s)s, %(queued)d queued %(total)s" msgstr "" #: penguintv/MainWindow.py:1439 #, python-format msgid "Downloaded %(percent)d%% of %(files)d file%(s)s %(total)s" msgstr "" #: penguintv/MainWindow.py:1450 #, python-format msgid "Downloads (%d)" msgstr "" #: penguintv/penguintv.py:292 msgid "Reindexing Feeds..." msgstr "" #: penguintv/penguintv.py:333 msgid "Polling feeds for the first time..." msgstr "" #: penguintv/penguintv.py:538 msgid "Polling Feeds..." msgstr "" #: penguintv/penguintv.py:541 penguintv/penguintv.py:1776 msgid "Feeds Updated" msgstr "" #: penguintv/penguintv.py:708 msgid "Loading Feeds..." msgstr "" #: penguintv/penguintv.py:760 msgid "Enclosures Disabled" msgstr "" #: penguintv/penguintv.py:855 msgid "No Unviewed Media" msgstr "" #: penguintv/penguintv.py:856 msgid "There is no unviewed media to download." msgstr "" #: penguintv/penguintv.py:867 msgid "Large Download" msgstr "" #: penguintv/penguintv.py:868 #, python-format msgid "" "If PenguinTV downloads all of the unviewed media, \n" "it will take %(space)s. Do you wish to continue?" msgstr "" #: penguintv/penguintv.py:881 msgid "Not Enough Free Space" msgstr "" #: penguintv/penguintv.py:882 #, python-format msgid "" "PenguinTV was unable to free enough disk space to download %(space)s of " "media." msgstr "" #: penguintv/penguintv.py:923 msgid "Exporting Feeds..." msgstr "" #: penguintv/penguintv.py:960 msgid "Importing OPML file" msgstr "" #: penguintv/penguintv.py:961 msgid "Loading the feeds from the OPML file" msgstr "" #: penguintv/penguintv.py:1031 msgid "Finishing OPML import" msgstr "" #: penguintv/penguintv.py:1115 msgid "Unknown file type" msgstr "" #: penguintv/penguintv.py:1131 msgid "Polling Feed..." msgstr "" #: penguintv/penguintv.py:1418 msgid "Trying to poll feed..." msgstr "" #: penguintv/penguintv.py:1757 msgid "Trouble connecting to the internet" msgstr "" #: penguintv/penguintv.py:1782 #, python-format msgid "Polling Feeds... (%(polled)d/%(total)d)" msgstr "" #: penguintv/PlanetView.py:247 penguintv/PlanetView.py:298 msgid "" "There was an error displaying the search results. Please reindex searches " "and try again" msgstr "" #: penguintv/PlanetView.py:290 msgid "No entries match those search criteria" msgstr "" #: penguintv/ptvDB.py:667 msgid "Waiting for first poll" msgstr "" #: penguintv/ptvDB.py:668 msgid "" "This feed has not yet been polled successfully. There might be an error " "with this feed.
" msgstr "" #: penguintv/ptv_sync.py:51 msgid "Building file list..." msgstr "" #: penguintv/ptv_sync.py:75 #, python-format msgid "Removing %(filename)s" msgstr "" #: penguintv/ptv_sync.py:98 #, python-format msgid "%(filename)s already exists" msgstr "" #: penguintv/ptv_sync.py:103 #, python-format msgid "Copying %(filename)s" msgstr "" #: penguintv/ptv_sync.py:116 msgid "Removing empty folders..." msgstr "" #: penguintv/ptv_sync.py:121 msgid "Synchronization cancelled" msgstr "" #: penguintv/ptv_sync.py:123 msgid "Copying Complete" msgstr "" #: penguintv/PtvTrayIcon.py:53 msgid "_Play" msgstr "" #: penguintv/PtvTrayIcon.py:53 msgid "Play Media" msgstr "" #: penguintv/PtvTrayIcon.py:54 msgid "_Pause" msgstr "" #: penguintv/PtvTrayIcon.py:54 msgid "Pause Media" msgstr "" #: penguintv/PtvTrayIcon.py:55 msgid "_Refresh" msgstr "" #: penguintv/PtvTrayIcon.py:55 msgid "Refresh feeds" msgstr "" #: penguintv/PtvTrayIcon.py:56 msgid "_About" msgstr "" #: penguintv/PtvTrayIcon.py:56 msgid "About PenguinTV" msgstr "" #: penguintv/PtvTrayIcon.py:57 msgid "_Quit" msgstr "" #: penguintv/PtvTrayIcon.py:57 msgid "Quit PenguinTV" msgstr "" #: penguintv/PtvTrayIcon.py:63 msgid "Show Notifications" msgstr "" #: penguintv/PtvTrayIcon.py:64 msgid "Show feed and download updates" msgstr "" #: penguintv/PtvTrayIcon.py:81 penguintv/PtvTrayIcon.py:94 #, python-format msgid "PenguinTV Version %(version)s" msgstr "" #: penguintv/PtvTrayIcon.py:139 msgid "Download Complete" msgstr "" #: penguintv/PtvTrayIcon.py:143 #, python-format msgid "%(feed_title)s: %(entry_title)s" msgstr "" #: penguintv/SynchronizeDialog.py:81 msgid "Select Destination Folder..." msgstr "" #: penguintv/SynchronizeDialog.py:101 penguintv/SynchronizeDialog.py:191 msgid "Destination Error" msgstr "" #: penguintv/SynchronizeDialog.py:102 msgid "" "The destination you have selected is not valid. \n" "Please select another destination and try again." msgstr "" #: penguintv/SynchronizeDialog.py:192 msgid "" "The destination you have selected is not valid. Please select another " "destination and try again." msgstr "" #: penguintv/SynchronizeDialog.py:269 msgid "Cancelling..." msgstr "" #: penguintv/TagEditorNG.py:173 penguintv/TagEditorNG.py:226 msgid "Rename Tag" msgstr "" #: penguintv/TagEditorNG.py:175 msgid "Please enter a new name for this tag" msgstr "" #: penguintv/TagEditorNG.py:228 msgid "Please enter a name for this new tag:" msgstr "" #: penguintv/TagEditorNG.py:268 msgid "Really Delete Tag?" msgstr "" #: penguintv/TagEditorNG.py:269 msgid "Are you sure you want to remove this tag from all feeds?" msgstr "" #: penguintv/utils.py:167 msgid "complete!" msgstr "" #: penguintv/utils.py:329 msgid "Open" msgstr "" #: penguintv/utils.py:330 msgid "Download" msgstr "" #: penguintv/utils.py:331 msgid "Download And Open" msgstr "" #: penguintv/utils.py:332 msgid "Pause" msgstr "" #: penguintv/utils.py:333 penguintv/utils.py:337 msgid "Cancel" msgstr "" #: penguintv/utils.py:334 msgid "Open File" msgstr "" #: penguintv/utils.py:336 msgid "Resume" msgstr "" #: penguintv/utils.py:338 msgid "Stop" msgstr "" #: penguintv/utils.py:347 msgid "Re-Download" msgstr "" #: penguintv/utils.py:350 msgid "Retry" msgstr "" # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the PACKAGE package. # FIRST AUTHOR , YEAR. 
# #, fuzzy msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2007-07-06 09:53-0400\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=CHARSET\n" "Content-Transfer-Encoding: 8bit\n" #: ../share/penguintv.glade.h:1 msgid " " msgstr "" #: ../share/penguintv.glade.h:2 msgid " megabytes" msgstr "" #: ../share/penguintv.glade.h:3 msgid " minutes" msgstr "" #: ../share/penguintv.glade.h:4 msgid "(c)2007 Owen Williams" msgstr "" #: ../share/penguintv.glade.h:5 msgid "0 bytes" msgstr "" #: ../share/penguintv.glade.h:6 msgid "Add Feed" msgstr "" #: ../share/penguintv.glade.h:7 msgid "Add Search Tag" msgstr "" #: ../share/penguintv.glade.h:8 msgid "BitTorrent Options" msgstr "" #: ../share/penguintv.glade.h:9 msgid "Create Feed Filter" msgstr "" #: ../share/penguintv.glade.h:10 msgid "Feed Filter Properties" msgstr "" #: ../share/penguintv.glade.h:11 msgid "General Options" msgstr "" #: ../share/penguintv.glade.h:12 msgid "Login Required" msgstr "" #: ../share/penguintv.glade.h:13 msgid "Modify Search Tag" msgstr "" #: ../share/penguintv.glade.h:14 msgid "Options" msgstr "" #: ../share/penguintv.glade.h:15 msgid "Refresh Mode" msgstr "" #: ../share/penguintv.glade.h:16 msgid "Synchronization Preview" msgstr "" #: ../share/penguintv.glade.h:17 msgid "Synchronizing Media" msgstr "" #: ../share/penguintv.glade.h:18 msgid "Copying..." msgstr "" #: ../share/penguintv.glade.h:19 msgid "A username and password are required for this feed:" msgstr "" #: ../share/penguintv.glade.h:20 msgid "Add Feed" msgstr "" #: ../share/penguintv.glade.h:21 msgid "Add Search Tag" msgstr "" #: ../share/penguintv.glade.h:22 msgid "Add _Feed Filter..." msgstr "" #: ../share/penguintv.glade.h:23 msgid "All Feeds" msgstr "" #: ../share/penguintv.glade.h:24 msgid "Always show notification icon" msgstr "" #: ../share/penguintv.glade.h:25 msgid "Automatically download media" msgstr "" #: ../share/penguintv.glade.h:26 msgid "Automatically download new media" msgstr "" #: ../share/penguintv.glade.h:27 msgid "Autotune refresh period" msgstr "" #: ../share/penguintv.glade.h:28 msgid "Browse..." 
msgstr "" #: ../share/penguintv.glade.h:29 msgid "Clear Search" msgstr "" #: ../share/penguintv.glade.h:30 msgid "Convert newline characters to HTML paragraph breaks" msgstr "" #: ../share/penguintv.glade.h:31 msgid "Copy _audio files only" msgstr "" #: ../share/penguintv.glade.h:32 msgid "Create Feed Filter" msgstr "" #: ../share/penguintv.glade.h:33 msgid "Delete _files from the database after copying" msgstr "" #: ../share/penguintv.glade.h:34 msgid "Description" msgstr "" #: ../share/penguintv.glade.h:35 msgid "Destination" msgstr "" #: ../share/penguintv.glade.h:36 msgid "Download Unviewed Media" msgstr "" #: ../share/penguintv.glade.h:37 msgid "Download _Unviewed Media" msgstr "" #: ../share/penguintv.glade.h:38 msgid "E_ntry" msgstr "" #: ../share/penguintv.glade.h:39 msgid "Edit Favorite Tags" msgstr "" #: ../share/penguintv.glade.h:40 msgid "Edit Saved Searches" msgstr "" #: ../share/penguintv.glade.h:41 msgid "Edit Tags" msgstr "" #: ../share/penguintv.glade.h:42 msgid "Fee_d" msgstr "" #: ../share/penguintv.glade.h:43 msgid "Feed Filter Properties" msgstr "" #: ../share/penguintv.glade.h:44 msgid "Feed Properties" msgstr "" #: ../share/penguintv.glade.h:45 msgid "Filtered Feed Name:" msgstr "" #: ../share/penguintv.glade.h:46 msgid "Hide Viewed" msgstr "" #: ../share/penguintv.glade.h:47 msgid "Homepage" msgstr "" #: ../share/penguintv.glade.h:48 msgid "If unchecked, media may be deleted to free up space" msgstr "" #: ../share/penguintv.glade.h:49 msgid "Include this feed in search results" msgstr "" #: ../share/penguintv.glade.h:50 msgid "Last Poll" msgstr "" #: ../share/penguintv.glade.h:51 msgid "Limit total disk usage to " msgstr "" #: ../share/penguintv.glade.h:52 msgid "Login Required" msgstr "" #: ../share/penguintv.glade.h:53 msgid "Mark As _Unviewed" msgstr "" #: ../share/penguintv.glade.h:54 msgid "Mark As _Viewed" msgstr "" #: ../share/penguintv.glade.h:55 msgid "Maximum port" msgstr "" #: ../share/penguintv.glade.h:56 msgid "Minimum port" msgstr "" #: ../share/penguintv.glade.h:57 msgid "Modify" msgstr "" #: ../share/penguintv.glade.h:58 msgid "Modify Search Tag" msgstr "" #: ../share/penguintv.glade.h:59 msgid "Never delete media automatically" msgstr "" #: ../share/penguintv.glade.h:60 msgid "Next Poll" msgstr "" #: ../share/penguintv.glade.h:62 msgid "None yet" msgstr "" #: ../share/penguintv.glade.h:63 msgid "Options" msgstr "" #: ../share/penguintv.glade.h:64 msgid "Original Feed Name: " msgstr "" #: ../share/penguintv.glade.h:65 msgid "Password: " msgstr "" #: ../share/penguintv.glade.h:66 msgid "Pause Selected Downloads" msgstr "" #: ../share/penguintv.glade.h:67 msgid "PenguinTV" msgstr "" #: ../share/penguintv.glade.h:68 msgid "PenguinTV Sourceforge Site" msgstr "" #: ../share/penguintv.glade.h:69 msgid "" "Please enter the URL of the feed you would like to add; and any tags you " "would like to apply to this feed, separated by commas:" msgstr "" #: ../share/penguintv.glade.h:70 msgid "" "Please enter the name for the new filtered feed, and the search terms you'd " "like to filter for" msgstr "" #: ../share/penguintv.glade.h:71 msgid "" "Please enter the search terms you'd like to save, and a name for this search." 
msgstr "" #: ../share/penguintv.glade.h:72 msgid "Podcast and Video Blog aggregator for GTK+ and GNOME" msgstr "" #: ../share/penguintv.glade.h:73 msgid "Poll all feeds on startup" msgstr "" #: ../share/penguintv.glade.h:74 msgid "Preferences" msgstr "" #: ../share/penguintv.glade.h:75 msgid "RSS Feed" msgstr "" #: ../share/penguintv.glade.h:76 msgid "Re_fresh" msgstr "" #: ../share/penguintv.glade.h:77 msgid "Re_fresh Search Indexes" msgstr "" #: ../share/penguintv.glade.h:78 msgid "Re_name" msgstr "" #: ../share/penguintv.glade.h:79 msgid "Refresh Feeds" msgstr "" #: ../share/penguintv.glade.h:80 msgid "Refresh Feeds with _Errors" msgstr "" #: ../share/penguintv.glade.h:81 msgid "Refresh every " msgstr "" #: ../share/penguintv.glade.h:82 msgid "Remove Feed" msgstr "" #: ../share/penguintv.glade.h:83 msgid "Resume" msgstr "" #: ../share/penguintv.glade.h:84 msgid "Resume Selected Downloads" msgstr "" #: ../share/penguintv.glade.h:85 msgid "Resume _All" msgstr "" #: ../share/penguintv.glade.h:86 msgid "Resume downloads on startup" msgstr "" #: ../share/penguintv.glade.h:87 msgid "Save Search..." msgstr "" #: ../share/penguintv.glade.h:88 msgid "Search Feeds and Entries" msgstr "" #: ../share/penguintv.glade.h:89 msgid "Search Terms:" msgstr "" #: ../share/penguintv.glade.h:90 msgid "" "Select a destination where all the downloaded PenguinTV media will be " "copied. If the destination is an mp3 player, make sure the mp3 player is " "connected before synchronizing." msgstr "" #: ../share/penguintv.glade.h:91 msgid "" "Select a tag in the list on the left, and all the feeds with that tag will " "be marked on the right. You may mark and unmark feeds to add or remove that " "tag from them.\n" "\n" "Tagged feeds will appear at the top of the list." msgstr "" #: ../share/penguintv.glade.h:94 msgid "Show _Notifications" msgstr "" #: ../share/penguintv.glade.h:95 msgid "Show notifications when this feed updates" msgstr "" #: ../share/penguintv.glade.h:96 msgid "Stop Selected Downloads" msgstr "" #: ../share/penguintv.glade.h:97 msgid "Synchonization Preview" msgstr "" #: ../share/penguintv.glade.h:98 msgid "Synchronize" msgstr "" #: ../share/penguintv.glade.h:99 msgid "Synchronizing" msgstr "" #: ../share/penguintv.glade.h:100 msgid "TEMPORARY ICOPN" msgstr "" #: ../share/penguintv.glade.h:101 msgid "Tag Editor" msgstr "" #: ../share/penguintv.glade.h:102 msgid "Tag Editor..." msgstr "" #: ../share/penguintv.glade.h:103 msgid "Tag Highlighter:" msgstr "" #: ../share/penguintv.glade.h:104 msgid "Tag name: " msgstr "" #: ../share/penguintv.glade.h:105 msgid "Tags:" msgstr "" #: ../share/penguintv.glade.h:106 msgid "Tags: " msgstr "" #: ../share/penguintv.glade.h:107 msgid "Title" msgstr "" #: ../share/penguintv.glade.h:108 msgid "URL: " msgstr "" #: ../share/penguintv.glade.h:109 msgid "Upload rate limit" msgstr "" #: ../share/penguintv.glade.h:110 msgid "User Name: " msgstr "" #: ../share/penguintv.glade.h:111 msgid "Using: " msgstr "" #: ../share/penguintv.glade.h:112 msgid "Work around feeds where entries appear to be in one large paragraph" msgstr "" #: ../share/penguintv.glade.h:113 msgid "" "You can drag tags from the righthand side to the favorites list on the " "left. To remove a favorite, drag it from the lefthand side back to the " "right." msgstr "" #: ../share/penguintv.glade.h:114 msgid "You may change the name of this search as well as the search terms." msgstr "" #: ../share/penguintv.glade.h:115 msgid "You may click on the items in the list to edit tags." 
msgstr "" #: ../share/penguintv.glade.h:116 msgid "_Add Feed..." msgstr "" #: ../share/penguintv.glade.h:117 msgid "_Delete All Media" msgstr "" #: ../share/penguintv.glade.h:118 msgid "_Delete Media" msgstr "" #: ../share/penguintv.glade.h:119 msgid "_Delete files on destination that are no longer in the database" msgstr "" #: ../share/penguintv.glade.h:120 msgid "_Download Media" msgstr "" #: ../share/penguintv.glade.h:121 msgid "_Export Subscriptions..." msgstr "" #: ../share/penguintv.glade.h:122 msgid "_Fancy Feed Display" msgstr "" #: ../share/penguintv.glade.h:123 msgid "_Favorite Tags..." msgstr "" #: ../share/penguintv.glade.h:124 msgid "_Go" msgstr "" #: ../share/penguintv.glade.h:125 msgid "_Horizontal Layout" msgstr "" #: ../share/penguintv.glade.h:126 msgid "_Import Subscriptions..." msgstr "" #: ../share/penguintv.glade.h:127 msgid "_Mark As Viewed" msgstr "" #: ../share/penguintv.glade.h:128 msgid "_Planet Style Layout" msgstr "" #: ../share/penguintv.glade.h:129 msgid "_Play Media" msgstr "" #: ../share/penguintv.glade.h:130 msgid "_Preview" msgstr "" #: ../share/penguintv.glade.h:131 msgid "_Properties" msgstr "" #: ../share/penguintv.glade.h:132 msgid "_Refresh Feeds" msgstr "" #: ../share/penguintv.glade.h:133 msgid "_Remove Feed" msgstr "" #: ../share/penguintv.glade.h:134 msgid "_Saved Searches..." msgstr "" #: ../share/penguintv.glade.h:135 msgid "_Show Today's Downloads..." msgstr "" #: ../share/penguintv.glade.h:136 msgid "_Standard Layout" msgstr "" #: ../share/penguintv.glade.h:137 msgid "_Synchronize" msgstr "" #: ../share/penguintv.glade.h:138 msgid "_Synchronize Media..." msgstr "" #: ../share/penguintv.glade.h:139 msgid "_Vertical Layout" msgstr "" #: ../share/penguintv.glade.h:140 msgid "_View" msgstr "" #: ../share/penguintv.glade.h:141 msgid "a" msgstr "" #: ../share/penguintv.glade.h:142 msgid "unknown" msgstr "" PenguinTV-4.2.0/LICENSE0000644000000000000000000010451410745132600011272 0ustar GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. 
For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. 
An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. 
You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. 
A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. 
For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. 
Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. 
Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. 
If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. 
The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. 
It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.

    <one line to give the program's name and a brief idea of what it does.>
    Copyright (C) <year>  <name of author>

    This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.

    This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more details.

    You should have received a copy of the GNU General Public License along with this program.  If not, see <http://www.gnu.org/licenses/>.

Also add information on how to contact you by electronic and paper mail.

If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode:

    <program>  Copyright (C) <year>  <name of author>
    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
    This is free software, and you are welcome to redistribute it
    under certain conditions; type `show c' for details.

The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box".

You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see <http://www.gnu.org/licenses/>.

The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read <http://www.gnu.org/philosophy/why-not-lgpl.html>.

PenguinTV-4.2.0/PenguinTVActivity.py0000644000000000000000000000362210651741653014233 0ustar
import sys, os, logging
import gtk, gobject
from gettext import gettext as _  # '_' marks strings for translation (used in add_toolbar below)

from sugar.activity import activity
import hulahop

#need to set things up before we import penguintv
activity_root = activity.get_bundle_path()
#chdir here so that relative RPATHs line up ('./lib')
os.chdir(activity_root)
#append to sys.path for the python packages
sys.path = [activity_root,] + sys.path

try:
    import pycurl
except:
    logging.warning("Trying to load bundled pycurl libraries")
    #append to sys.path for the python packages
    sys.path.append(os.path.join(activity_root, 'site-packages'))
    #try again. if it fails now, let it fail
    import pycurl

os.environ['SUGAR_PENGUINTV'] = '1'  #set up variable so that utils knows we are running_sugar

from penguintv import penguintv

class PenguinTVActivity(activity.Activity):
    def __init__(self, handle):
        activity.Activity.__init__(self, handle)
        app = penguintv.PenguinTVApp(self)
        self.set_title('News Reader')
        toolbox = activity.ActivityToolbox(self)
        #toolbox.add_toolbar(_('Feeds'), app.main_window.toolbar)
        app.connect('app-loaded', self.add_toolbar, toolbox)
        #gobject.idle_add(self.add_toolbar, toolbox, app)
        self.set_toolbox(toolbox)
        toolbox.show()
        self.connect('destroy', self.do_quit, app)

    def do_quit(self, event, app):
        app.do_quit()
        logging.info('deleting app now')
        del app

    def add_toolbar(self, app, toolbox):
        toolbox.add_toolbar(_('Feeds'), app.main_window.toolbar)

if __name__ == '__main__':
    # Here starts the dynamic part of the program
    def do_quit(self, event, app):
        app.do_quit()
    window = gtk.Window()
    gtk.gdk.threads_init()
    app = penguintv.PenguinTVApp(window)
    window.connect('delete-event', do_quit, app)
    gtk.main()

def main():
    #another way to run the program
    def do_quit(self, event, app):
        app.do_quit()
    window = gtk.Window()
    gtk.gdk.threads_init()
    app = penguintv.PenguinTVApp(window)
    window.connect('delete-event', do_quit, app)
    gtk.main()
PenguinTV-4.2.0/ptv_profile.py0000644000000000000000000000020310646750274013173 0ustar
def run_ptv():
    import PenguinTVActivity
    PenguinTVActivity.main()

import profile
profile.run('run_ptv()', '/tmp/penguintv-prof')
PenguinTV-4.2.0/mo/0000755000000000000000000000000011450721177010702 5ustar
PenguinTV-4.2.0/mo/pt/0000755000000000000000000000000011450721177011325 5ustar
PenguinTV-4.2.0/mo/pt/LC_MESSAGES/0000755000000000000000000000000011450721177013112 5ustar
PenguinTV-4.2.0/mo/pt/LC_MESSAGES/penguintv.mo0000644000000000000000000001716411450721203015465 0ustar
[binary gettext catalog: compiled Portuguese (pt) translations for the "penguintv" domain; its PO header credits Susana Pereira, PO-Revision-Date 2006-02-21; binary string tables not reproducible as text]
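ptv_profile.py above profiles a full application run with the standard-library profile module and saves the raw statistics to /tmp/penguintv-prof. A minimal sketch for inspecting that file afterwards with the stdlib pstats module (assuming a profiled run has already been made, so the stats file exists):

import pstats

# Load the stats file written by profile.run() in ptv_profile.py.
stats = pstats.Stats('/tmp/penguintv-prof')
# Collapse directory noise, sort by cumulative time, show the 20 hottest calls.
stats.strip_dirs().sort_stats('cumulative').print_stats(20)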
PenguinTV-4.2.0/mo/sv/0000755000000000000000000000000011450721177011332 5ustar PenguinTV-4.2.0/mo/sv/LC_MESSAGES/0000755000000000000000000000000011450721177013117 5ustar PenguinTV-4.2.0/mo/sv/LC_MESSAGES/penguintv.mo0000644000000000000000000002270311450721203015465 0ustar [binary gettext catalog: Swedish (sv) translations for the "penguintv" domain; machine-generated .mo data omitted]
PenguinTV-4.2.0/mo/pt_BR/0000755000000000000000000000000011450721177011710 5ustar PenguinTV-4.2.0/mo/pt_BR/LC_MESSAGES/0000755000000000000000000000000011450721177013475 5ustar PenguinTV-4.2.0/mo/pt_BR/LC_MESSAGES/penguintv.mo0000644000000000000000000001717111450721203016046 0ustar [binary gettext catalog: Brazilian Portuguese (pt_BR) translations for the "penguintv" domain; machine-generated .mo data omitted]
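# (Illustrative sketch, not part of the archive: loading one of the compiled
# catalogs above at runtime.  The /usr/share/locale path matches what ptvDB.py
# binds for the "penguintv" domain; the pt_BR sample string is taken from the
# catalog itself.)
#
#   import gettext
#   trans = gettext.translation('penguintv', '/usr/share/locale', languages=['pt_BR'])
#   _ = trans.ugettext               # Python 2 unicode-returning lookup
#   print _('Add Feed')              # -> u'Adicionar Canal'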
PenguinTV-4.2.0/penguintv/0000755000000000000000000000000011450721611012277 5ustar PenguinTV-4.2.0/penguintv/ptvDbus.py0000644000000000000000000000441411275641464014313 0ustar import dbus
import dbus.service
import dbus.glib
import gobject

import utils

class ptvDbus(dbus.service.Object):
	def __init__(self, app, bus, object_path="/PtvApp"):
		self._app = app
		dbus.service.Object.__init__(self, bus, object_path)

	@dbus.service.method("com.ywwg.PenguinTV.AppInterface")
	def GetDatabaseName(self):
		return self._app.get_database_name()

	@dbus.service.method("com.ywwg.PenguinTV.AppInterface")
	def AddFeed(self, url):
		if utils.RUNNING_SUGAR:
			self.sugar_add_button.popup()
		else:
			self._app.main_window.show_window_add_feed(False)
			self._app.main_window.set_window_add_feed_location(url)

	@dbus.service.method("com.ywwg.PenguinTV.AppInterface")
	def ImportOpml(self, filename):
		try:
			f = open(filename)
			self._app.import_subscriptions(f)
		except Exception, e:
			print "Error importing subscriptions:", e
		return

	@dbus.service.method("com.ywwg.PenguinTV.AppInterface")
	def PollingCallback(self, pyobject_str, cancelled=False):
		#print "args we got to callback: %s" % pyobject_str
		args = eval(pyobject_str)
		self._app.polling_callback(args, cancelled)
		if self._app.is_exiting():
			return False
		return True

	@dbus.service.method("com.ywwg.PenguinTV.AppInterface")
	def FinishedCallback(self, total):
		self._app.poll_finished_cb(total)

	@dbus.service.method("com.ywwg.PenguinTV.AppInterface")
	def Ping(self):
		return self._app.poller_ping_cb()

	@dbus.service.method("com.ywwg.PenguinTV.AppInterface")
	def Play(self):
		if self._app.is_exiting():
			return False
		self._app.player.control_internal("play")
		return True

	@dbus.service.method("com.ywwg.PenguinTV.AppInterface")
	def Pause(self):
		if self._app.is_exiting():
			return False
		self._app.player.control_internal("pause")
		return True

	@dbus.service.method("com.ywwg.PenguinTV.AppInterface")
	def Next(self):
		if self._app.is_exiting():
			return False
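		#(illustrative, not in the original file: any D-Bus client can drive the
		# methods exported on this object.  The object path and interface name come
		# from this class; the well-known bus name below is an assumption, since it
		# is chosen by the code that constructs ptvDbus.)
		#
		#   import dbus
		#   bus = dbus.SessionBus()
		#   proxy = bus.get_object('com.ywwg.PenguinTV', '/PtvApp')  # bus name assumed
		#   iface = dbus.Interface(proxy, 'com.ywwg.PenguinTV.AppInterface')
		#   iface.PlayPause()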
self._app.player.control_internal("next") return True @dbus.service.method("com.ywwg.PenguinTV.AppInterface") def Prev(self): if self._app.is_exiting(): return False self._app.player.control_internal("prev") return True @dbus.service.method("com.ywwg.PenguinTV.AppInterface") def PlayPause(self): if self._app.is_exiting(): return False self._app.player.control_internal("playpause") return True PenguinTV-4.2.0/penguintv/Downloader.py0000644000000000000000000000401611151600304014741 0ustar #order is only used in downloadview for order in which to cancel -- I think QUEUED = 0 PAUSED = 1 STOPPED = 2 FINISHED = 3 FINISHED_AND_PLAY = 4 DOWNLOADING = 5 FAILURE = -1 #import traceback import gobject class Downloader: """Interface class for downloading. Doesn't do anything""" def __init__(self, media, media_dir, params, resume, queue, progress_callback=None, finished_callback=None): #no params if progress_callback is not None: self._app_progress_callback = progress_callback else: self._app_progress_callback = self._basic_progress_callback if finished_callback is not None: self._app_finished_callback = finished_callback else: self._app_finished_callback = self._basic_finished_callback self._resume = resume self._queue = queue self._media_dir = media_dir self._stop_download = False self.media = media self.status = QUEUED self.message = "" self.progress = 0 self.total_size = 1 def download(self,args): """args is set by ThreadPool, and is unused""" if self._stop_download: return False self.running = True self.status = DOWNLOADING return True def _progress_callback(self): if self._stop_download: self._app_progress_callback(self) return 1 return self._app_progress_callback(self) def _finished_callback(self): return self._app_finished_callback(self) def pause(self): self.status = PAUSED self._stop_download = True def stop(self): self.status = STOPPED if self._stop_download: #if it's called _again_, ping the app and say "we're done already!" 
			return self._app_finished_callback(self)
		self._stop_download = True

	def _basic_finished_callback(self, data):
		filename = data[0]['file']
		info = data[1:]
		print filename+" "+str(info)

	def _basic_progress_callback(self, data):
		media, blocks, blocksize, totalsize=data
		percent = (blocks*blocksize*100) / totalsize
		if percent>100:
			percent = 100
		if percent%10 == 0:
			print media['file']+" "+str(percent)+"%"
PenguinTV-4.2.0/penguintv/FtpSyncClient.py0000644000000000000000000001136511073705157015404 0ustar import logging
import threading
import ftplib
import socket
import time
import tempfile

from SqliteSyncClient import SqliteSyncClient

FILENAME = 'penguintv-syncdb-1.db'
STAMP_FILENAME = 'penguintv-syncdb-timestamp'

class FtpSyncClient(SqliteSyncClient):
	def __init__(self):
		#username = 'transfer'
		#password = 'upwego'
		#hostname = 'ftp.powderhouse.net'
		#path = '/test'
		#port = 21
		SqliteSyncClient.__init__(self)
		self.__transfer_lock = threading.Lock()
		self._username = ""
		self._password = ""
		self._hostname = ""
		self._port = 21
		self._path = "/"
		self._ftp = None
		self._stampfile = tempfile.mkstemp(suffix='.txt')[1]
		self._last_conn_check = 0

	def get_parameters(self):
		return [
			(_("FTP Server"), "hostname", "", False),
			(_("Port"), "port", "21", False),
			(_("Username"), "username", "", False),
			(_("Password"), "password", "", True),
			(_("Path"), "path", "/", False) ]

	def set_hostname(self, hostname):
		if hostname == self._hostname:
			return
		self.finish()
		self._hostname = hostname

	def set_port(self, port):
		try:
			port = int(port)
		except:
			self._port = -1
			return False
		if port == self._port:
			return
		self.finish()
		self._port = port

	def set_path(self, path):
		if path == self._path:
			return
		self.finish()
		self._path = path

	def _do_authenticate(self, tryreconnect=False):
		if self._ftp is not None and not tryreconnect:
			self._ftp.quit()
		self._ftp = ftplib.FTP()
		try:
			self._ftp.connect(self._hostname, self._port)
		except:
			return False
		try:
			self._ftp.login(self._username, self._password)
		except:
			return False
		try:
			self._ftp.cwd(self._path)
		except:
			return False
		self._last_conn_check = int(time.time())
		return True

	def _set_server_timestamp(self, timestamp):
		assert self._authenticated
		if not self._check_connection():
			return False
		self.__transfer_lock.acquire()
		f = open(self._stampfile, 'w')
		f.write(str(timestamp))
		f.close()
		try:
			f = open(self._stampfile, 'r')
			self._ftp.storlines('STOR %s' % STAMP_FILENAME, f)
			f.close()
			self.__transfer_lock.release()
			return True
		except:
			self.__transfer_lock.release()
			return False

	def _get_server_timestamp(self):
		global timestamp
		timestamp = None
		def retr_cb(line):
			global timestamp
			timestamp = int(line)
		assert self._authenticated
		if not self._check_connection():
			return -1
		self.__transfer_lock.acquire()
		self._ftp.retrlines('RETR %s' % STAMP_FILENAME, retr_cb)
		for i in range(0,10):
			if timestamp is not None:
				self.__transfer_lock.release()
				return timestamp
			time.sleep(1)
		self.__transfer_lock.release()
		return -1

	def _db_exists(self):
		global stamp_exists
		stamp_exists = False
		def dir_cb(line):
			global stamp_exists
			if STAMP_FILENAME in line:
				stamp_exists = True
		if not self._check_connection():
			return False
		self.__transfer_lock.acquire()
		self._ftp.dir(dir_cb)
		for i in range(0,10):
			if stamp_exists:
				self.__transfer_lock.release()
				return True
			time.sleep(0.25)
		self.__transfer_lock.release()
		return False

	def _do_download_db(self):
		global data
		if not self._check_connection():
			return None
		self.__transfer_lock.acquire()
		filesize = self._ftp.size(FILENAME)
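		# (Illustrative usage sketch, not part of the original module: wiring up
		# the client with the setters defined above.  set_username/set_password
		# are an assumption -- the "username"/"password" keys in get_parameters()
		# suggest them, but they live on the base class, not in this file.)
		#
		#   client = FtpSyncClient()
		#   client.set_hostname('ftp.example.com')   # hostname is a placeholder
		#   client.set_port('21')
		#   client.set_path('/penguintv')
		#logging.debug("expecting to download: %s" %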
str(filesize)) if filesize is None: return None data = "" def retr_cb(line): global data #logging.debug("line: %i data: %i" % (len(line), len(data))) data += line self._ftp.retrbinary('RETR %s' % FILENAME, retr_cb) wait = 0 last_size = 0 while len(data) < filesize and wait < 30: #logging.debug("size: %i wait: %i" % (len(data), wait)) if len(data) > last_size: wait = 0 last_size = len(data) time.sleep(1) wait += 1 self.__transfer_lock.release() if len(data) < filesize: #logging.debug("got less than we expected") return None return data def _upload_db(self, fp): if not self._check_connection(): return False self.__transfer_lock.acquire() try: self._ftp.storbinary('STOR %s' % FILENAME, fp) except: self.__transfer_lock.release() return False self.__transfer_lock.release() return True def _check_connection(self): def dir_cb(line): pass if int(time.time()) - self._last_conn_check < 30: #logging.debug("last connection was recent, assuming ok") return True #logging.debug("checking connection") try: self._ftp.dir(dir_cb) #logging.debug("connection still up") return True except Exception, e: #logging.debug("exception checking connection: %s" % str(e)) if not self._do_authenticate(tryreconnect=True): #logging.debug("can't reconnect") return False #logging.debug("reconnected") return True PenguinTV-4.2.0/penguintv/html/0000755000000000000000000000000011450514774013255 5ustar PenguinTV-4.2.0/penguintv/html/PTVMozilla.py0000644000000000000000000001065511425372100015622 0ustar import os.path import logging import gobject import gtk import PTVhtml import utils try: import gtkmozembed except: try: from ptvmozembed import gtkmozembed except: pass class PTVMozilla(PTVhtml.PTVhtml): def __init__(self, view, home, share_path): PTVhtml.PTVhtml.__init__(self, view, home, share_path) self._home = home self._css = "" self._realized = False self._USING_AJAX = False self._view = view if utils.RUNNING_HILDON: f = open(os.path.join(share_path, "mozilla-planet-hildon.css")) else: f = open(os.path.join(share_path, "mozilla-planet.css")) for l in f.readlines(): self._css += l f.close() def finish(self): self._moz.destroy() gtkmozembed.pop_startup() def is_ajax_ok(self): if utils.RUNNING_HILDON: return False return True def post_show_init(self, widget): if utils.RUNNING_HILDON: logging.debug("Hildon: Not using ajax view") self._USING_AJAX = False else: self._USING_AJAX = True if utils.HAS_GCONF: try: import gconf except: from gnome import gconf self._conf = gconf.client_get_default() self._conf.notify_add('/desktop/gnome/interface/font_name',self._gconf_reset_moz_font) self._reset_moz_font() utils.init_gtkmozembed() gtkmozembed.set_profile_path(self._home, 'gecko') gtkmozembed.push_startup() self._moz = gtkmozembed.MozEmbed() self._moz.connect("new-window", self._new_window) self._moz.connect("link-message", self._link_message) self._moz.connect("open-uri", self._link_clicked) self._moz.connect("realize", self._realize, True) self._moz.connect("unrealize", self._realize, False) widget.add_with_viewport(self._moz) self._moz.show() def build_header(self, html=""): header = [""" title""" % (self._view.get_bg_color(), self._view.get_fg_color(), self._moz_font, self._moz_size, self._css)] header.append("""") header.append(html) header.append("""""") return "\n".join(header) def render(self, html, stream_url="file:///", display_id=None): if self._realized or utils.RUNNING_SUGAR: #pad html to solve truncation problems html = html + " "*80*5 if stream_url is None: stream_url = "file:///" self._moz.render_data(html, 
long(len(html)), stream_url, 'text/html') else: logging.warning("HTML widget not realized") def dl_interrupt(self): pass def _new_window(self, mozembed, retval, chromemask): # hack to try to properly load links that want a new window self.emit('open-uri', mozembed.get_link_message()) def _realize(self, widget, realized): self._realized = realized self._moz.load_url("about:blank") #self._moz.load_url("http://google.com") def _link_message(self, data): if not utils.RUNNING_HILDON: self.emit('link-message', self._moz.get_link_message()) def _link_clicked(self, mozembed, link): link = link.strip() #As of ubuntu 10.04, I get tons of spurious file:/// or ajax proxy url #signals that I have to trap if link == "file:///" or link.startswith("http://localhost:80"): return False self.emit('open-uri', link) return True #don't load url please def _gconf_reset_moz_font(self, client, *args, **kwargs): self._reset_moz_font() def _reset_moz_font(self): def isNumber(x): try: float(x) return True except: return False def isValid(x): if x in ["Bold", "Italic", "Regular","BoldItalic"]:#,"Demi","Oblique" Book return False return True moz_font = self._conf.get_string('/desktop/gnome/interface/font_name') if moz_font is None: moz_font = "Sans Serif 12" #take just the beginning for the font name. prepare for dense, unreadable code self._moz_font = " ".join(map(str, [x for x in moz_font.split() if not isNumber(x)])) self._moz_font = "'"+self._moz_font+"','"+" ".join(map(str, [x for x in moz_font.split() if isValid(x)])) + "',Arial" self._moz_size = int([x for x in moz_font.split() if isNumber(x)][-1])+4 gobject.type_register(PTVMozilla) PenguinTV-4.2.0/penguintv/html/PTVGtkHtml.py0000644000000000000000000001273611170427207015575 0ustar # class template for various html widgets import threading import os, os.path import re import time import logging import gobject import gtk import utils import PTVhtml import ThreadPool import SimpleImageCache IMG_REGEX = re.compile("", re.IGNORECASE|re.DOTALL) class PTVGtkHtml(PTVhtml.PTVhtml): def __init__(self, view, home, share_path): PTVhtml.PTVhtml.__init__(self, view, home, share_path) self._htmlview = None self._document_lock = threading.Lock() self._image_cache = SimpleImageCache.SimpleImageCache() self._css = "" self._last_link_time = 0 self._view = view f = open(os.path.join(share_path, "gtkhtml.css")) for l in f.readlines(): self._css += l f.close() self._image_pool = ThreadPool.ThreadPool(5, "PlanetView") self._dl_total = 0 self._dl_count = 0 def finish(self): self._image_pool.joinAll(False, False) del self._image_pool def is_ajax_ok(self): return False def post_show_init(self, widget): import gtkhtml2 import SimpleImageCache import threading htmlview = gtkhtml2.View() self._document = gtkhtml2.Document() self._document.connect("link-clicked", self._link_clicked) htmlview.connect("on_url", self._on_url) self._document.connect("request-url", self._request_url) htmlview.get_vadjustment().set_value(0) htmlview.get_hadjustment().set_value(0) self._document.clear() htmlview.set_document(self._document) self._htmlview = htmlview widget.set_property("shadow-type",gtk.SHADOW_IN) widget.set_hadjustment(self._htmlview.get_hadjustment()) widget.set_vadjustment(self._htmlview.get_vadjustment()) widget.add(self._htmlview) self._scrolled_window = widget def build_header(self, html=""): header = [""" title""" % self._css] header.append(html) header.append("""""") return "\n".join(header) def render(self, html, stream_url="file:///", display_id=None): self._document_lock.acquire() imgs 
= IMG_REGEX.findall(html) uncached=0 for url in imgs: if not self._image_cache.is_cached(url): uncached+=1 if uncached > 0: self._document.clear() self._document.open_stream("text/html") d = { "background_color": self._view.get_bg_color(), "loading": _("Loading images...")} self._document.write_stream("""%(loading)s""" % d) self._document.close_stream() self._document_lock.release() self._dl_count = 0 self._dl_total = uncached for url in imgs: if not self._image_cache.is_cached(url): self._image_pool.queueTask(self._do_download_image, (url, display_id), self._image_dl_cb) self._image_pool.queueTask(self._download_done, (display_id, html)) else: self._scrolled_window.get_hadjustment().set_value(0) self._scrolled_window.get_vadjustment().set_value(0) self._document.clear() self._document.open_stream("text/html") self._document.write_stream(html) self._document.close_stream() self._document_lock.release() def dl_interrupt(self): self._image_pool.joinAll(False, False) self._dl_count = 0 self._dl_total = 0 def _do_download_image(self, args): url, display_id = args self._image_cache.get_image(url) #print "do download", display_id return display_id def _image_dl_cb(self, display_id): #print "dl_cb", display_id, self._view.get_display_id() if display_id == self._view.get_display_id(): self._dl_count += 1 def _download_done(self, args): display_id, html = args count = 0 last_count = self._dl_count #print "dl_done", display_id, self._view.get_display_id() while display_id == self._view.get_display_id() and count < (10 * 2): if last_count != self._dl_count: #if downloads are still coming in, reset counter last_count = self._dl_count count = 0 if self._dl_count >= self._dl_total: gobject.idle_add(self._images_loaded, display_id, html) return count += 1 time.sleep(0.5) gobject.idle_add(self._images_loaded, display_id, html) def _images_loaded(self, display_id, html): #if we're changing, nevermind. 
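	#(added note: the display_id guard -- each queued image task carries the
	# display_id captured when render() started, and results are only applied
	# if it still matches self._view.get_display_id(), so a slow fetch for a
	# page the user already left never overwrites the current document.)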
#also make sure entry is the same and that we shouldn't be blanks #print "loaded", display_id, self._view.get_display_id() if display_id == self._view.get_display_id(): va = self._scrolled_window.get_vadjustment() ha = self._scrolled_window.get_hadjustment() self._document_lock.acquire() self._document.clear() self._document.open_stream("text/html") self._document.write_stream(html) self._document.close_stream() self._document_lock.release() return False def _request_url(self, document, url, stream): try: image = self._image_cache.get_image(url) stream.write(image) stream.close() except Exception, ex: stream.close() def _link_clicked(self, document, link): if not utils.RUNNING_HILDON: link = link.strip() self.emit('open-uri', link) def _on_url(self, view, url): if utils.RUNNING_HILDON: now = time.time() #prevent double-clicks print now - self._last_link_time if now - self._last_link_time < 1.0: logging.debug("detected double-click, ignoring") return self._last_link_time = now if url is None: return link = url.strip() self.emit('open-uri', link) else: if url is None: url = "" self.emit('link-message', url) PenguinTV-4.2.0/penguintv/html/__init__.py0000644000000000000000000000000011166715631015353 0ustar PenguinTV-4.2.0/penguintv/html/PTVhtml.py0000644000000000000000000000250511170431467015163 0ustar # class template for various html widgets import gobject class PTVhtml(gobject.GObject): __gsignals__ = { 'link-message': (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, ([gobject.TYPE_STRING])), 'open-uri': (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, ([gobject.TYPE_STRING])) } def __init__(self, view, home, share_path): gobject.GObject.__init__(self) def finish(self): assert False def is_ajax_ok(self): """does the widget support ajax""" assert False def post_show_init(self, widget): """widget must be a gtkscrolledwindow. 
HTML widget will install itself in the scrolled window and show itself""" assert False def build_header(self, html): """build the html header needed (fonts, css, etc) html is a string to be appended to the header before closing tags""" assert False def render(self, html, stream_url="file:///", image_id=None): """html is a string of html stream_url is the 'root' path or whatever the terminology is display_id is an object that an image downloader will compare against to determine if it should continue displaying the image""" assert False def dl_interrupt(self): """stop downloading images (if applicable)""" assert False PenguinTV-4.2.0/penguintv/FeedFilterPropertiesDialog.py0000644000000000000000000000544110646750251020073 0ustar # Written by Owen Williams # see LICENSE for license information import penguintv import gtk from ptvDB import FeedAlreadyExists class FeedFilterPropertiesDialog: def __init__(self,xml,app): self._xml = xml self._app = app self._window = xml.get_widget("window_filter_properties") for key in dir(self.__class__): if key[:3] == 'on_': self._xml.signal_connect(key, getattr(self,key)) self._filter_name_entry = self._xml.get_widget("filter_name_entry") self._query_entry = self._xml.get_widget("query_entry") self._pointed_feed_label = self._xml.get_widget("pointed_feed_label") self._edit_tags_widget = xml.get_widget("edit_tags_widget") self._pointed_feed_id = -1 self._feed_id = 0 self._old_name = "" self._old_query = "" self._old_tags = [] def on_save_values_activate(self, event): title = self._filter_name_entry.get_text() query = self._query_entry.get_text() if title != self._old_name or query != self._old_query: try: self._app.set_feed_filter(self._feed_id, title, query) self._old_query = query self._old_name = title except FeedAlreadyExists: dialog = gtk.Dialog(title=_("Filter Already Exists"), parent=None, flags=gtk.DIALOG_MODAL, buttons=(gtk.STOCK_OK, gtk.RESPONSE_ACCEPT)) label = gtk.Label(_("A filter already exists for that feed and query. 
Please choose a different query.")) dialog.vbox.pack_start(label, True, True, 0) label.show() dialog.run() dialog.hide() del dialog self._query_entry.grab_focus() return False tags=[tag.strip() for tag in self._edit_tags_widget.get_text().split(',')] self._app.apply_tags_to_feed(self._feed_id, self._old_tags, tags) return True def show(self): self._window.set_transient_for(self._app.main_window.get_parent()) self._filter_name_entry.grab_focus() self._window.show() def on_window_feed_filter_delete_event(self, widget, event): return self._window.hide_on_delete() def hide(self): self._filter_name_entry.set_text("") self._query_entry.set_text("") self._pointed_feed_label.set_text("") self._window.hide() def set_pointed_feed_id(self, feed_id): self._pointed_feed_id = feed_id def set_feed_id(self, feed_id): self._feed_id = feed_id def set_query(self, query): self._query_entry.set_text(query) self._old_query = query def set_filter_name(self, name): self._filter_name_entry.set_text(name) self._old_name = name def set_tags(self, tags): text = "" if tags: for tag in tags: text=text+tag+", " text = text[0:-2] self._edit_tags_widget.set_text(text) self._old_tags = tags def on_close_button_clicked(self,event): if self.on_save_values_activate(None): self.hide() def on_revert_button_clicked(self, event): self.set_filter_name(self._old_name) self.set_query(self._old_query) PenguinTV-4.2.0/penguintv/ptvDB.py0000644000000000000000000035356411450514272013713 0ustar # Written by Owen Williams # see LICENSE for license information import logging try: import sqlite3 as sqlite logging.info("Using built-in sqlite3") except: logging.info("Using external pysqlite2") from pysqlite2 import dbapi2 as sqlite from math import floor,ceil import time import string import urllib, urlparse from urllib2 import URLError from types import * import ThreadPool import sys, os, os.path, re import gc import locale import gettext try: set except: from sets import Set as set import traceback import pickle import hashlib import socket socket.setdefaulttimeout(30.0) #locale.setlocale(locale.LC_ALL, '') gettext.install('penguintv', '/usr/share/locale') gettext.bindtextdomain('penguintv', '/usr/share/locale') gettext.textdomain('penguintv') _=gettext.gettext import utils import IconManager import OfflineImageCache if utils.HAS_LUCENE: import Lucene if utils.HAS_XAPIAN: import PTVXapian if utils.HAS_GCONF: try: import gconf except: from gnome import gconf if utils.RUNNING_SUGAR: # or utils.RUNNING_HILDON: USING_FLAG_CACHE = False else: USING_FLAG_CACHE = True #USING_FLAG_CACHE = False LATEST_DB_VER = 8 NEW = 0 EXISTS = 1 MODIFIED = 2 DELETED = 3 BOOL = 1 INT = 2 STRING = 3 if utils.RUNNING_SUGAR or utils.RUNNING_HILDON: MAX_ARTICLES = 50 else: MAX_ARTICLES = 1000 _common_unicode = { u'\u0093':u'"', u'\u0091': u"'", u'\u0092': u"'", u'\u0094':u'"', u'\u0085':u'...', u'\u2026':u'...'} #Possible entry flags F_ERROR = 64 F_DOWNLOADING = 32 F_UNVIEWED = 16 F_DOWNLOADED = 8 F_NEW = 4 F_PAUSED = 2 F_MEDIA = 1 #arguments for poller A_POOLED_POLL = 64 # if this is set, don't do housework after each poll A_ERROR_FEEDS = 32 A_DO_REINDEX = 16 A_ALL_FEEDS = 8 A_AUTOTUNE = 4 A_IGNORE_ETAG = 2 A_DELETE_ENTRIES = 1 #download statuses D_NOT_DOWNLOADED = 0 D_DOWNLOADING = 1 D_DOWNLOADED = 2 D_RESUMABLE = 3 D_ERROR = -1 D_WARNING = -2 #tag types T_ALL = 0 T_TAG = 1 T_SEARCH = 2 T_BUILTIN = 3 #obsolete tag-based flags (needed for schema upgrades) T_NOAUTODOWNLOAD="noautodownload" T_NOSEARCH="nosearch" T_NOAUTOEXPIRE="noautoexpire" T_NOTIFYUPDATES="notify" 
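#(added example: the FF_* constants defined just below are independent bits,
# OR'd together into feeds.flags, so membership is tested bitwise, e.g.:
#
#   if flags & FF_NOSEARCH:        # bitwise AND, not equality
#       continue                   # feed opted out of the search index
#
# which is how the no-search blacklist built in ptvDB.__init__ below treats
# the flag; "continue" here is illustrative, not code from this file.)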
#new bit-based flags FF_NOAUTODOWNLOAD = 1 FF_NOSEARCH = 2 FF_NOAUTOEXPIRE = 4 FF_NOTIFYUPDATES = 8 FF_ADDNEWLINES = 16 FF_MARKASREAD = 32 FF_NOKEEPDELETED = 64 FF_DOWNLOADSINGLE = 128 DB_FILE="penguintv4.db" STRIPPER_REGEX = re.compile('<.*?>') class ptvDB: entry_flag_cache = {} def __init__(self, polling_callback=None, change_setting_cb=None): self._exiting = False self.searcher = None self.home = utils.get_home() try: os.stat(self.home) except: try: os.mkdir(self.home) except: raise DBError, "error creating directories: "+self.home if not os.access(self.home, os.R_OK | os.W_OK | os.X_OK): raise DBError, "Insufficient access to "+self.home self._initializing_db = False try: #also check db connection in _process_feed if os.path.isfile(os.path.join(self.home,"penguintv4.db")) == False: import shutil self._initializing_db = True if os.path.isfile(os.path.join(self.home,"penguintv3.db")): try: shutil.copyfile(os.path.join(self.home,"penguintv3.db"), os.path.join(self.home,"penguintv4.db")) except: raise DBError,"couldn't create new database file" elif os.path.isfile(os.path.join(self.home,"penguintv2.db")): try: shutil.copyfile(os.path.join(self.home,"penguintv2.db"), os.path.join(self.home,"penguintv4.db")) except: raise DBError,"couldn't create new database file" elif os.path.isfile(os.path.join(self.home,"penguintv.db")): try: shutil.copyfile(os.path.join(self.home,"penguintv.db"), os.path.join(self.home,"penguintv4.db")) except: raise DBError,"couldn't create new database file" self._db=sqlite.connect(os.path.join(self.home,"penguintv4.db"), timeout=30.0, isolation_level="IMMEDIATE") except: raise DBError,"error connecting to database" self._c = self._db.cursor() db_ver = self.get_version_info()[0] if db_ver == -1: logging.info("database will need init") self._initializing_db = True self._cancel_poll_multiple = False self._c.execute('PRAGMA synchronous="NORMAL"') if not utils.RUNNING_SUGAR and not utils.RUNNING_HILDON: self._c.execute('PRAGMA cache_size=6000') self.cache_dirty = True try: if not self._initializing_db: self.cache_dirty = self.get_setting(BOOL, "feed_cache_dirty", True) except: pass if polling_callback is None: self.polling_callback=self._polling_callback else: self.polling_callback = polling_callback self._change_setting_cb = change_setting_cb self._blacklist = [] if utils.HAS_SEARCH: if utils.HAS_LUCENE: self.searcher = Lucene.Lucene() if utils.HAS_XAPIAN: self.searcher = PTVXapian.PTVXapian() else: logging.error("Have search, but no search engine? 
Programming error!") assert False if not self._initializing_db: try: self._blacklist = self.get_feeds_for_flag(FF_NOSEARCH) except: logging.error("possible old database version") if utils.HAS_GCONF: self._conf = gconf.client_get_default() self._icon_manager = IconManager.IconManager(self.home) self._image_cache = None cache_images = self.get_setting(BOOL, "/apps/penguintv/cache_images_locally", False) if cache_images: store_location = self.get_setting(STRING, '/apps/penguintv/media_storage_location', os.path.join(utils.get_home(), "media")) if store_location != "": self._image_cache = OfflineImageCache.OfflineImageCache(os.path.join(store_location, "images")) self._reindex_entry_list = [] self._reindex_feed_list = [] self._image_cache_list = [] self._image_uncache_list = [] self._filtered_entries = {} self._parse_list = [] def _db_execute(self, c, command, args=()): #if "FROM FEEDS" in command.upper(): #traceback.print_stack() #if "UPDATE" in command.upper(): # print command, args # traceback.print_stack() try: unicode_check = True if command.upper().startswith("SELECT"): unicode_check = False if unicode_check: u_args = [] for i, val in enumerate(args): if type(val) is str: #logging.debug("DB Warning: String argument, making unicode: %s %i %s" % (command,i,val)) val = unicode(val) u_args.append(val) u_args = tuple(u_args) return c.execute(command, u_args) else: return c.execute(command, args) except Exception, e: #traceback.print_stack() logging.error("Database error:" + str(command) + " args:" + str(args)) raise e #def __del__(self): # self.finish() def finish(self, vacuumok=True, majorsearchwait=False, correctthread=True): #allow multiple finishes if self._exiting: return self._exiting=True self._cancel_poll_multiple = True if utils.HAS_SEARCH and self.searcher is not None: if not majorsearchwait and self.searcher.is_indexing(only_this_thread=True): logging.debug("not waiting for reindex") self.searcher.finish(False) else: if len(self._reindex_entry_list) > 0 or len(self._reindex_feed_list) > 0: logging.info("have leftover things to reindex, reindexing") #don't do it threadedly or else we will interrupt it on the next line self.reindex(threaded=False) #it's usually not much... 
self.searcher.finish(True) self.cache_images() if self._image_cache is not None: self._image_cache.finish() #FIXME: lame, but I'm being lazy #if randint(1,100) == 1: # print "cleaning up unreferenced media" # self.clean_file_media() if correctthread: import random if random.randint(1,80) == 1 and vacuumok: logging.info("compacting database") self._c.execute('VACUUM') self._c.close() self._db.close() def get_version_info(self): try: self._db_execute(self._c, u'SELECT rowid FROM feeds LIMIT 1') except Exception, e: logging.debug("db except: %s" % str(e)) return (-1, LATEST_DB_VER) self._db_execute(self._c, u'SELECT value FROM settings WHERE data="db_ver"') db_ver = self._c.fetchone() if db_ver is None: db_ver = 0 else: db_ver = int(db_ver[0]) return (db_ver, LATEST_DB_VER) def maybe_initialize_db(self): """returns true if new database""" db_ver = self.get_version_info()[0] if db_ver == -1: logging.info("initializing database") self._initializing_db = True self._init_database() return True try: #logging.debug("current database version is " + str(db_ver)) if db_ver == 0: self._migrate_database_one_two() if db_ver < 2: self._migrate_database_one_two() if db_ver < 3: self._migrate_database_two_three() if db_ver < 4: self._migrate_database_three_four() if db_ver < 5: self._migrate_database_four_five() if db_ver < 6: self._migrate_database_five_six() if db_ver < 7: self._migrate_database_six_seven() if db_ver < 8: self._migrate_database_seven_eight() self.clean_database_media() if db_ver > LATEST_DB_VER: logging.warning("This database comes from a later version of PenguinTV and may not work with this version") raise DBError, "db_ver is "+str(db_ver)+" instead of "+str(LATEST_DB_VER) except Exception, e: logging.error("exception:" + str(e)) #if self.searcher.needs_index: # print "indexing for the first time" # self.searcher.Do_Index_Threaded() if not utils.RUNNING_HILDON: self._check_settings_location() self.fix_tags() self._fix_indexes() return False def done_initializing(self): self._initializing_db = False def _init_database(self): self._db_execute(self._c, u"""CREATE TABLE settings ( data TEXT NOT NULL, value TEXT );""") #for pointer / pointed filter feeds, feed_pointer is feed_id, and description is query self._db_execute(self._c, u"""CREATE TABLE feeds ( id INTEGER PRIMARY KEY, url TEXT NOT NULL, pollfail BOOL NOT NULL, title TEXT, description TEXT, link TEXT, etag TEXT, pollfreq INT NOT NULL, lastpoll DATE, newatlast INT, flags INTEGER NOT NULL DEFAULT 0, feed_pointer INT, image TEXT, UNIQUE(url) );""") self._db_execute(self._c, u"""CREATE TABLE entries ( id INTEGER PRIMARY KEY, feed_id INTEGER UNSIGNED NOT NULL, title TEXT, creator TEXT, description TEXT, fakedate DATE, date DATE, guid TEXT, link TEXT, keep INTEGER, read INTEGER NOT NULL, hash TEXT );""") self._db_execute(self._c, u"""CREATE TABLE media ( id INTEGER PRIMARY KEY, entry_id INTEGER UNSIGNED NOT NULL, feed_id INTEGER UNSIGNED NOT NULL, url TEXT NOT NULL, file TEXT, mimetype TEXT, download_status INTEGER NOT NULL, errormsg TEXT, viewed BOOL NOT NULL, keep BOOL NOT NULL, length INTEGER, download_date DATE, thumbnail TEXT ); """) self._db_execute(self._c, u"""CREATE TABLE tags ( tag TEXT, feed_id INT UNSIGNED NOT NULL, query TEXT, favorite INT, type INT);""") self._db_execute(self._c, u"""CREATE INDEX pollindex ON entries (fakedate DESC);""") self._db_execute(self._c, u"""CREATE INDEX feedindex ON feeds (title DESC);""") self._db_execute(self._c, u"""CREATE INDEX e_feedindex ON entries (feed_id DESC);""") self._db_execute(self._c, 
u"""CREATE INDEX m_feedindex ON media (feed_id DESC);""") self._db_execute(self._c, u"""CREATE INDEX m_entryindex ON media (entry_id DESC);""") self._db_execute(self._c, u"""CREATE INDEX t_feedindex ON tags (feed_id DESC);""") self._db_execute(self._c, u'UPDATE entries SET keep=0') self._db.commit() self._db_execute(self._c, u"""INSERT INTO settings (data, value) VALUES ("db_ver", ?)""", (LATEST_DB_VER,)) self._db_execute(self._c, u'INSERT INTO settings (data, value) VALUES ("frequency_table_update",0)') self._db.commit() def _migrate_database_one_two(self): #add table settings logging.info("upgrading to database schema 2") try: self._db_execute(self._c, u'SELECT * FROM settings') #if it doesn't exist, except: #we create it self._db_execute(self._c, u"""CREATE TABLE settings ( id INTEGER PRIMARY KEY, data NOT NULL, value );""") self._db_execute(self._c, u"""CREATE TABLE tags ( id INTEGER PRIMARY KEY, tag, feed_id INT UNSIGNED NOT NULL);""") #add fake_date column try: self._db_execute(self._c, u'ALTER TABLE entries ADD COLUMN fakedate DATE') self._db_execute(self._c, u'UPDATE entries SET fakedate = date') except sqlite.OperationalError,e: if e != "duplicate column name: fakedate": logging.warning(str(e)) #else pass #change db_ver (last thing) self._db_execute(self._c, u'ALTER TABLE feeds ADD COLUMN pollfreq INT') self._db_execute(self._c, u'UPDATE feeds SET pollfreq=1800') self._db_execute(self._c, u'ALTER TABLE feeds ADD COLUMN lastpoll DATE') self._db_execute(self._c, u'UPDATE feeds SET lastpoll=?',(int(time.time())-(30*60),)) self._db_execute(self._c, u'ALTER TABLE feeds ADD COLUMN newatlast INT') self._db_execute(self._c, u'UPDATE feeds SET newatlast=0') try: self._db_execute(self._c, u'INSERT INTO settings (data, value) VALUES ("db_ver",2)') except: pass try: self._db_execute(self._c, u'UPDATE settings SET value=2 WHERE data="db_ver"') except: pass self._db.commit() def _migrate_database_two_three(self): """version 3 added flag cache, entry_count_cache, and unread_count_cache""" logging.info("upgrading to database schema 3") self._db_execute(self._c, u'ALTER TABLE feeds ADD COLUMN flag_cache INT') self._db_execute(self._c, u'ALTER TABLE feeds ADD COLUMN entry_count_cache INT') self._db_execute(self._c, u'ALTER TABLE feeds ADD COLUMN unread_count_cache INT') self._db_execute(self._c, u'UPDATE settings SET value=3 WHERE data="db_ver"') self._db_execute(self._c, u'INSERT INTO settings (data, value) VALUES ("feed_cache_dirty",1)') self._db.commit() def _migrate_database_three_four(self): """version 4 adds fulltext table""" logging.info("upgrading to database schema 4") self._db_execute(self._c, u'ALTER TABLE tags ADD COLUMN type INT') self._db_execute(self._c, u'ALTER TABLE tags ADD COLUMN query') self._db_execute(self._c, u'ALTER TABLE tags ADD COLUMN favorite INT') self._db_execute(self._c, u'UPDATE tags SET type=?',(T_TAG,)) #they must all be regular tags right now self._db_execute(self._c, u'UPDATE settings SET value=4 WHERE data="db_ver"') self._db_execute(self._c, u'ALTER TABLE feeds ADD COLUMN feed_pointer INT') self._db_execute(self._c, u'ALTER TABLE feeds ADD COLUMN link') self._db_execute(self._c, u'ALTER TABLE feeds ADD COLUMN image') self._db_execute(self._c, u'ALTER TABLE media ADD COLUMN download_date DATE') self._db_execute(self._c, u'ALTER TABLE media ADD COLUMN thumbnail') self._db_execute(self._c, u'ALTER TABLE media ADD COLUMN feed_id INTEGER') self._db_execute(self._c, u'UPDATE feeds SET feed_pointer=-1') #no filters yet! 
self._db_execute(self._c, u'UPDATE feeds SET link=""') self._db_execute(self._c, u"""CREATE TABLE terms ( id INTEGER PRIMARY KEY, term, frequency INT);""") self._db_execute(self._c, u'INSERT INTO settings (data, value) VALUES ("frequency_table_update",0)') self._db.commit() logging.info("building new column, please wait...") self._db_execute(self._c, u'SELECT id FROM feeds') for feed_id, in self._c.fetchall(): self._db_execute(self._c, u'SELECT media.id FROM entries INNER JOIN media ON media.entry_id = entries.id WHERE entries.feed_id=?', (feed_id,)) media = self._c.fetchall() media = [m[0] for m in media] if len(media) > 0: qmarks = "?,"*(len(media)-1)+"?" self._db_execute(self._c, u'UPDATE media SET feed_id=? WHERE id IN ('+qmarks+')', tuple([feed_id] + media)) self._db.commit() def _migrate_database_four_five(self): """version five gets rid of 'id' column, 'new' column, adds option_flags column""" logging.info("upgrading to database schema 5, please wait...") self.__remove_columns("settings","""data TEXT NOT NULL, value TEXT""", "data, value") self.__remove_columns("feeds", """id INTEGER PRIMARY KEY, url TEXT NOT NULL, polled INT NOT NULL, pollfail BOOL NOT NULL, title TEXT, description TEXT, link TEXT, modified INT UNSIGNED NOT NULL, etag TEXT, pollfreq INT NOT NULL, lastpoll DATE, newatlast INT, flag_cache INT, entry_count_cache INT, unread_count_cache INT, feed_pointer INT, image TEXT, UNIQUE(url)""", """id, url, polled, pollfail, title, description, link, modified, etag, pollfreq, lastpoll, newatlast, flag_cache, entry_count_cache, unread_count_cache, feed_pointer, image""") self._db_execute(self._c, u'ALTER TABLE feeds ADD COLUMN flags INTEGER NOT NULL DEFAULT 0') self.__update_flags(T_NOAUTODOWNLOAD, FF_NOAUTODOWNLOAD) self.__update_flags(T_NOSEARCH, FF_NOSEARCH) self.__update_flags(T_NOAUTOEXPIRE, FF_NOAUTOEXPIRE) self.__update_flags(T_NOTIFYUPDATES, FF_NOTIFYUPDATES) self.__remove_columns("entries", """id INTEGER PRIMARY KEY, feed_id INTEGER UNSIGNED NOT NULL, title TEXT, creator TEXT, description TEXT, fakedate DATE, date DATE, guid TEXT, link TEXT, read INTEGER NOT NULL, old INTEGER NOT NULL""", "id, feed_id, title, creator, description, fakedate, date, guid, link, read, old") self.__remove_columns("media", """id INTEGER PRIMARY KEY, entry_id INTEGER UNSIGNED NOT NULL, feed_id INTEGER UNSIGNED NOT NULL, url TEXT NOT NULL, file TEXT, mimetype TEXT, download_status INTEGER NOT NULL, viewed BOOL NOT NULL, keep BOOL NOT NULL, length INTEGER, download_date DATE, thumbnail TEXT""", "id, entry_id, feed_id, url, file, mimetype, download_status, viewed, keep, length, download_date, thumbnail") self.__remove_columns("tags", """tag TEXT, feed_id INT UNSIGNED NOT NULL, query TEXT, favorite INT, type""", "tag, feed_id, query, favorite, type") self._db_execute(self._c, u'UPDATE settings SET value=5 WHERE data="db_ver"') self._db.commit() def _migrate_database_five_six(self): logging.info("upgrading to database schema 6, please wait...") self._db_execute(self._c, u'ALTER TABLE entries ADD COLUMN keep BOOL') self._db_execute(self._c, u'UPDATE entries SET keep=0') self.__remove_columns("feeds", """id INTEGER PRIMARY KEY, url TEXT NOT NULL, pollfail BOOL NOT NULL, title TEXT, description TEXT, link TEXT, modified INT UNSIGNED NOT NULL, etag TEXT, pollfreq INT NOT NULL, lastpoll DATE, newatlast INT, flags INTEGER NOT NULL DEFAULT 0, flag_cache INT, entry_count_cache INT, unread_count_cache INT, feed_pointer INT, image TEXT, UNIQUE(url)""", """id, url, pollfail, title, description, link, 
modified, etag, pollfreq, lastpoll, newatlast, flags, flag_cache, entry_count_cache, unread_count_cache, feed_pointer, image""") self._db_execute(self._c, u'ALTER TABLE feeds ADD COLUMN first_entry_cache TEXT') self._db_execute(self._c, u'UPDATE feeds SET first_entry_cache=""') self._db_execute(self._c, u'UPDATE settings SET value=6 WHERE data="db_ver"') self._db_execute(self._c, u"""CREATE INDEX pollindex ON entries (fakedate DESC);""") self._db_execute(self._c, u"""CREATE INDEX feedindex ON feeds (title DESC);""") self._db_execute(self._c, u"""CREATE INDEX e_feedindex ON entries (feed_id DESC);""") self._db_execute(self._c, u"""CREATE INDEX m_feedindex ON media (feed_id DESC);""") self._db_execute(self._c, u"""CREATE INDEX m_entryindex ON media (entry_id DESC);""") self._db_execute(self._c, u"""CREATE INDEX t_feedindex ON tags (feed_id DESC);""") self._db.commit() def _migrate_database_six_seven(self): logging.info("upgrading to database schema 7, please wait...") self.__remove_columns("feeds", """id INTEGER PRIMARY KEY, url TEXT NOT NULL, pollfail BOOL NOT NULL, title TEXT, description TEXT, link TEXT, etag TEXT, pollfreq INT NOT NULL, lastpoll DATE, newatlast INT, flags INTEGER NOT NULL DEFAULT 0, feed_pointer INT, image TEXT, UNIQUE(url)""", """id, url, pollfail, title, description, link, etag, pollfreq, lastpoll, newatlast, flags, feed_pointer, image""") self.__remove_columns("entries", """id INTEGER PRIMARY KEY, feed_id INTEGER UNSIGNED NOT NULL, title TEXT, creator TEXT, description TEXT, fakedate DATE, date DATE, guid TEXT, link TEXT, keep INTEGER, read INTEGER NOT NULL""", """id, feed_id, title, creator, description, fakedate, date, guid, link, keep, read""") self._db_execute(self._c, u'ALTER TABLE entries ADD COLUMN hash TEXT') logging.info("Creating entry hashes") self._db_execute(self._c, u'SELECT rowid, description, title, guid FROM entries') entries = self._c.fetchall() hashes = [] for entry_id, description, title, guid in entries: entry_hash = self._get_hash(guid, title, description) self._db_execute(self._c, u'UPDATE entries SET hash=? WHERE rowid=?', \ (entry_hash, entry_id)) self._db.commit() self._db_execute(self._c, u'UPDATE settings SET value=7 WHERE data="db_ver"') self._db.commit() def _migrate_database_seven_eight(self): logging.info("upgrading to database schema 8, please wait...") self._db_execute(self._c, u'ALTER TABLE media ADD COLUMN errormsg TEXT') self._db_execute(self._c, u'UPDATE settings SET value=8 WHERE data="db_ver"') self._db.commit() def __remove_columns(self, table, new_schema, new_columns): """dangerous internal function without injection checking. (only called by migration function and with no user-programmable arguments)""" logging.info("updating %s ..." % table) self._c.execute(u"CREATE TEMPORARY TABLE t_backup(" + new_schema + ")") self._c.execute(u"INSERT INTO t_backup SELECT "+new_columns+" FROM " + table) self._c.execute(u"DROP TABLE "+ table) self._c.execute(u"CREATE TABLE " + table + " ("+ new_schema +")") self._c.execute(u"INSERT INTO " + table + " SELECT " + new_columns + " FROM t_backup") self._c.execute(u"DROP TABLE t_backup") self._db.commit() def __update_flags(self, tag_flag, int_flag): """for migration. take all feeds with tag tag_flag and add int_flag to its flag value. Then delete the tag_flag""" flagged_feeds = self.get_feeds_for_tag(tag_flag) if len(flagged_feeds) > 0: qmarks = "?,"*(len(flagged_feeds)-1)+"?" #print u'UPDATE feeds SET flags = flags + ? 
WHERE feeds.rowid in ('+qmarks+')' #print (int_flag,) + tuple(flagged_feeds) self._db_execute(self._c, u'UPDATE feeds SET flags = flags + ? WHERE feeds.rowid in ('+qmarks+')', (int_flag,) + tuple(flagged_feeds)) self.remove_tag(tag_flag) def _get_hash(self, guid, title, description): s = hashlib.sha1() text = STRIPPER_REGEX.sub('', ' '.join((guid, title, description))) s.update(text) return s.hexdigest() def _fix_indexes(self): try: self._db_execute(self._c, 'SELECT sql FROM sqlite_master WHERE name="pollindex"') result = self._c.fetchone()[0] except: result = "" if "fakedate" not in result: logging.info("Rebuilding indexes") #this means the user was using svn before I fixed the indexes self._db_execute(self._c, 'SELECT name FROM sqlite_master WHERE type="index"') result = self._c.fetchall() for index in result: if 'autoindex' not in index[0]: self._db_execute(self._c, 'DROP INDEX %s' % index) self._db.commit() self._db_execute(self._c, u"""CREATE INDEX pollindex ON entries (fakedate DESC);""") self._db_execute(self._c, u"""CREATE INDEX feedindex ON feeds (title DESC);""") self._db_execute(self._c, u"""CREATE INDEX e_feedindex ON entries (feed_id DESC);""") self._db_execute(self._c, u"""CREATE INDEX m_feedindex ON media (feed_id DESC);""") self._db_execute(self._c, u"""CREATE INDEX m_entryindex ON media (entry_id DESC);""") self._db_execute(self._c, u"""CREATE INDEX t_feedindex ON tags (feed_id DESC);""") logging.info("Indexes rebuilt") def clean_database_media(self): self._db_execute(self._c, "SELECT rowid,file,entry_id FROM media") result = self._c.fetchall() for item in result: self._db_execute(self._c, "SELECT title FROM entries WHERE rowid=?",(item[2],)) title = self._c.fetchone() if title is None: #this entry doesn't exist anymore self._db_execute(self._c, "DELETE FROM media WHERE rowid=?",(item[0],)) self._db_execute(self._c, "SELECT rowid,file FROM media WHERE download_status=?", (D_DOWNLOADED,)) result = self._c.fetchall() for rowid, filename in self._c: if not os.path.isfile(filename): self._db_execute(self._c, "UPDATE media SET download_status=? WHERE rowid=?", (D_NOT_DOWNLOADED, rowid)) self._db.commit() #this is called when we change storage schemas def clean_file_media(self): """walks the media dir, and deletes anything that doesn't have an entry in the database. Also deletes dirs with only a playlist or with nothing""" media_dir = self.get_setting(STRING, '/apps/penguintv/media_storage_location', os.path.join(utils.get_home(), "media")) d = os.walk(media_dir) for root,dirs,files in d: try: image_index = dirs.index("images") del dirs[image_index] except: pass if root!=media_dir: for file in files: if file != "playlist.m3u": self._db_execute(self._c, u"SELECT rowid, download_status FROM media WHERE file=?",(os.path.join(root, file),)) result = self._c.fetchone() if result is None: logging.info("deleting "+os.path.join(root,file)) os.remove(os.path.join(root,file)) elif result[1] == D_NOT_DOWNLOADED: #db says it's not downloaded, so remove it logging.info("deleting "+os.path.join(root,file)) os.remove(os.path.join(root,file)) d = os.walk(media_dir) for root,dirs,files in d: try: image_index = dirs.index("images") del dirs[image_index] except: pass if root!=media_dir: if len(files) == 1: if files[0] == "playlist.m3u": logging.info("deleting "+root) utils.deltree(root) elif len(files) == 0: logging.info("deleting "+root) utils.deltree(root) #now clean up images? 
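		#(added summary: at this point both walks above are done -- files with no
		# surviving db row are gone, and directories holding nothing or only a
		# playlist.m3u have been deleted -- so the offline image cache gets its
		# turn at housekeeping last.)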
if self._image_cache is not None: self._image_cache.cleanup() def relocate_media(self, old_dir, new_dir): """rewrite db so that media files point to a new place. Lots of checking involved""" if old_dir[-1] == '/' or old_dir[-1] == '\\': old_dir = old_dir[:-1] assert os.access(new_dir, os.F_OK & os.R_OK & os.W_OK & os.X_OK) assert os.access(old_dir, os.F_OK & os.R_OK & os.W_OK & os.X_OK) self._db_execute(self._c, u'SELECT rowid, file FROM media WHERE file IS NOT NULL') rows = self._c.fetchall() for rowid, filename in rows: assert filename.startswith(old_dir) for rowid, filename in rows: new_filename = os.path.join(new_dir, filename[len(old_dir) + 1:]) self._db_execute(self._c, u'UPDATE media SET file=? WHERE rowid=?', (new_filename, rowid)) self._db.commit() def _migrate_storage_style(self, media_dir, style): self.clean_file_media() self._db_execute(self._c, u'SELECT media.rowid, media.file, media.feed_id, feeds.title, media.download_date FROM media INNER JOIN feeds ON media.feed_id = feeds.rowid WHERE media.download_status=? ORDER BY feeds.title', (D_DOWNLOADED,)) result = self._c.fetchall() for media_id, oldpath, feed_id, title, download_date in result: if not os.path.isfile(oldpath): logging.warning("File doesn't exist for migration: %i %s" % (media_id, oldpath)) continue #figure out new filename filename = os.path.basename(oldpath) if style=="BYDATE": newdir = os.path.join(media_dir, utils.get_dated_dir(time.gmtime(download_date))) elif style=="BYNAME": newdir = os.path.join(media_dir, utils.make_pathsafe(title)) else: logging.error("Programming Error: style must be 'BYDATE' or 'BYNAME'") assert False newpath = os.path.join(newdir, filename) #be sure there are no collisions i=0 filen, ext = os.path.splitext(filename) while os.path.isfile(newpath): newpath = os.path.join(newdir, "%s-%i%s" % (filen,i,ext)) i=i+1 if not os.path.isdir(newdir): os.mkdir(newdir) try: os.rename(oldpath, newpath) #on fail, db update doesn't happen self._db_execute(self._c, u'UPDATE media SET file=? WHERE rowid=?', (newpath, media_id)) except Exception, e: logging.warning("Trouble moving media file %s to %s:\n %s\n" % (oldpath, newpath, str(e))) self._db.commit() #clean up self.clean_file_media() def set_media_storage_style_dated(self, media_dir): self._migrate_storage_style(media_dir, 'BYDATE') def set_media_storage_style_named(self, media_dir): self._migrate_storage_style(media_dir, 'BYNAME') def _check_settings_location(self): """Do we suddenly have gconf, where before we were using the db? If so, migrate from db to gconf""" settings_in_db = self.get_setting(BOOL, "settings_in_db", utils.HAS_GCONF, force_db=True) settings_now_in_db = settings_in_db if settings_in_db: if utils.HAS_GCONF: self._db_execute(self._c, u'SELECT data, value FROM settings') settings = self._c.fetchall() for data, value in settings: if data.startswith('/'): val = self._conf.get_default_from_schema(data) if val is None: #not in schema, let it be replaced with a default continue if val.type == gconf.VALUE_BOOL: self._conf.set_bool(data, bool(value)) elif val.type == gconf.VALUE_INT: self._conf.set_int(data, int(value)) elif val.type == gconf.VALUE_STRING: self._conf.set_string(data, value) settings_now_in_db = False else: if not utils.HAS_GCONF: logging.error("Setting used to be in gconf, but gconf is now missing. 
Loading defaults") settings_now_in_db = True self.set_setting(BOOL, 'settings_in_db', settings_now_in_db, force_db=True) def get_setting(self, type, datum, default=None, force_db=False): if utils.HAS_GCONF and self._initializing_db: logging.debug("we are initing db, returning and setting default: %s %s" % (datum, str(default))) return default #always return default, gconf LIES if utils.HAS_GCONF and datum[0] == '/' and not force_db: if type == BOOL: retval = self._conf.get_bool(datum) elif type == INT: retval = self._conf.get_int(datum) elif type == STRING: retval = self._conf.get_string(datum) if retval is not None: return retval return default else: self._db_execute(self._c, u'SELECT value FROM settings WHERE data=?',(datum,)) retval = self._c.fetchone() if retval is not None: if type == BOOL: return bool(int(retval[0])) elif type == INT: return int(retval[0]) elif type == STRING: return str(retval[0]) return retval[0] return default def set_setting(self, type, datum, value, force_db=False): if utils.HAS_GCONF and datum[0] == '/' and not force_db: if type == BOOL: self._conf.set_bool(datum, value) elif type == INT: self._conf.set_int(datum, value) elif type == STRING: self._conf.set_string(datum, value) else: current_val = self.get_setting(type, datum) if current_val is None: self._db_execute(self._c, u'INSERT INTO settings (data, value) VALUES (?,?)', (datum, value)) else: self._db_execute(self._c, u'UPDATE settings SET value=? WHERE data=?', (value,datum)) self._db.commit() if self._change_setting_cb is not None: self._change_setting_cb(type, datum, value) def set_feed_cache(self, cachelist): """Cachelist format: rowid, flag, unread, total""" try: fd = open(os.path.join(self.home, 'feed_cache.pickle'), 'w') pickle.dump(cachelist, fd) except: logging.warning("Couldn't create feed_cache.pickle.") return #self._db_execute(self._c, u'UPDATE feeds SET flag_cache=?, unread_count_cache=?, entry_count_cache=?, first_entry_cache=? WHERE rowid=?',\ #(cache[1], cache[2], cache[3], cache[4], cache[0])) #self._db_execute(self._c, u'UPDATE feeds SET unread_count_cache=? WHERE rowid=?',(cache[2],cache[0])) #self._db_execute(self._c, u'UPDATE feeds SET entry_count_cache=? WHERE rowid=?',(cache[3],cache[0])) #self._db.commit() #and only then... 
self.set_setting(BOOL, "feed_cache_dirty", False) self.cache_dirty = False def get_feed_cache(self): if self.cache_dirty: logging.debug("Feed cache is dirty, returning empty set") return None try: fd = open(os.path.join(self.home, 'feed_cache.pickle'), 'r') cache = pickle.load(fd) except: logging.warning("error loading feed_cache.pickle ") return None #self._db_execute(self._c, u'SELECT rowid, flag_cache, unread_count_cache, entry_count_cache, pollfail, first_entry_cache FROM feeds ORDER BY UPPER(TITLE)') #cache = self._c.fetchall() self.set_setting(BOOL, "feed_cache_dirty", True) self.cache_dirty=True return cache def insertURL(self, url, title=None): #if a feed with that url doesn't already exists, add it self._db_execute(self._c, """SELECT url FROM feeds WHERE url=?""",(url,)) #on success, fetch will return the url itself if self._c.fetchone() != (url,): if title is not None: self._db_execute(self._c, u"""INSERT INTO feeds (title,url,pollfail,pollfreq,lastpoll,newatlast,flags,feed_pointer,image) VALUES (?, ?,0, 1800,0,0,0,-1,"")""", (title,url)) #default 30 minute polling else: self._db_execute(self._c, u"""INSERT INTO feeds (title,url,pollfail,pollfreq,lastpoll,newatlast,flags,feed_pointer,image) VALUES (?, ?,0, 1800,0,0,0,-1,"")""", (url,url)) #default 30 minute polling self._db.commit() #self._db_execute(self._c, u"""SELECT rowid,url FROM feeds WHERE url=?""",(url,)) self._db_execute(self._c, "SELECT last_insert_rowid()") feed_id = self._c.fetchone()[0] d={ 'title':_("Waiting for first poll"), 'description':_("This feed has not yet been polled successfully. There might be an error with this feed.
"+str(title)), } self._db_execute(self._c, u'INSERT INTO entries (feed_id, title, creator, description, read, fakedate, date, guid, link, keep) VALUES (?, ?, NULL, ?, ?, 0, ?, ?, "http://", 0)',(feed_id,d['title'],d['description'],'0', int(time.time()), int(time.time()))) self._db.commit() else: self._db_execute(self._c, """SELECT rowid FROM feeds WHERE url=?""",(url,)) feed_id = self._c.fetchone() feed_id = feed_id[0] logging.info("db: feed already exists") raise FeedAlreadyExists(feed_id) return feed_id def add_feed_filter(self, pointed_feed_id, filter_name, query): #self._db_execute(self._c, u'SELECT rowid,feed_pointer,description FROM feeds WHERE feed_pointer=? AND description=?',(pointed_feed_id,query)) #result = self._c.fetchone() #if result is None: import random s = hashlib.sha1() #this is lame I know. We shouldn't ever get a collision here though! s.update(filter_name+query+str(random.getrandbits(32))) self._db_execute(self._c, u'INSERT INTO feeds (title,url,feed_pointer,description,pollfail,pollfreq,lastpoll,newatlast,flags) VALUES (?, ?,?,?, 0,21600,0,0,0)', (filter_name,s.hexdigest(),pointed_feed_id,query)) self._db.commit() self._db_execute(self._c, "SELECT last_insert_rowid()") return self._c.fetchone()[0] #else: # raise FeedAlreadyExists, result[0] def set_feed_filter(self, pointer_feed_id, filter_name, query): self._db_execute(self._c, u'SELECT feed_pointer FROM feeds WHERE rowid=?',(pointer_feed_id,)) pointed_id = self._c.fetchone() if pointed_id is None: raise NoFeed, pointer_feed_id pointed_id = pointed_id[0] self._db_execute(self._c, u'SELECT rowid FROM feeds WHERE feed_pointer=? AND description=?',(pointed_id,query)) #result = self._c.fetchone() #if result is None: self._db_execute(self._c, u'UPDATE feeds SET title=?, description=? 
WHERE rowid=?',(filter_name, query, pointer_feed_id)) self._db.commit() #else: # raise FeedAlreadyExists, result[0] def delete_feed(self, feed_id): #check for valid entry self._db_execute(self._c, """SELECT rowid FROM feeds WHERE rowid=?""",(feed_id,)) result = self._c.fetchone()[0] if result != feed_id: raise NoFeed,feed_id #delete the feed, its entries, and its media (this does not delete files) self._db_execute(self._c, """DELETE FROM feeds WHERE rowid=?""",(feed_id,)) self._reindex_feed_list.append(feed_id) self._db_execute(self._c, u'DELETE FROM tags WHERE feed_id=?',(feed_id,)) self._db.commit() #result = self._c.fetchone() #print(result) self._icon_manager.remove_icon(feed_id) self._db_execute(self._c, 'SELECT rowid FROM entries WHERE feed_id=?',(feed_id,)) data=self._c.fetchall() if data: dataList = [list(row) for row in data] for datum in dataList: if self._image_cache is not None: self._image_cache.remove_cache(datum[0]) self._db_execute(self._c, 'SELECT rowid FROM media WHERE entry_id=?',(datum[0],)) media=self._c.fetchall() if media: mediaList = [list(row) for row in media] for medium in mediaList: self.delete_media(int(medium[0])) self._db.commit() self._db_execute(self._c, 'DELETE FROM media WHERE entry_id=?',(datum[0],)) self._reindex_entry_list.append(datum[0]) self._db_execute(self._c, """DELETE FROM entries WHERE feed_id=?""",(feed_id,)) self._db.commit() def delete_media(self, media_id): media = self.get_media(media_id) try: #if it doesn't even have a 'file' key then return if media['file']==None: return except: return try: if os.path.isfile(media['file']): os.remove(media['file']) elif os.path.isdir(media['file']): #could be a dir if it was a bittorrent download utils.deltree(media['file']) except os.error, detail: logging.error("Error deleting: "+str(detail)) #but keep going in case the dirs are empty now try: #now check to see if we should get rid of the dated dir import glob globlist = glob.glob(os.path.split(media['file'])[0]+"/*") if len(globlist)==1 and os.path.split(globlist[0])[1]=="playlist.m3u": #if only the playlist is left, we're done utils.deltree(os.path.split(media['file'])[0]) if len(globlist)==0: #similarly, if dir is empty, we're done. utils.deltree(os.path.split(media['file'])[0]) except os.error, detail: logging.error("Error deleting dirs: "+str(detail)) #if everything worked, set status self.set_media_download_status(media_id,D_NOT_DOWNLOADED) def delete_bad(self): self._db_execute(self._c, """DELETE FROM feeds WHERE title IS NULL""") self._db.commit() def poll_multiple(self, arguments=0, feeds=None): """Polls multiple feeds multithreadedly""" successes=[] cur_time = int(time.time()) self._cancel_poll_multiple = False if feeds is None: if arguments & A_AUTOTUNE and arguments & A_ALL_FEEDS == 0: self._db_execute(self._c, 'SELECT rowid FROM feeds WHERE (? 
- lastpoll) >= pollfreq ORDER BY pollfreq', (cur_time,)) elif arguments & A_ERROR_FEEDS: self._db_execute(self._c, 'SELECT rowid FROM feeds WHERE pollfail=1 ORDER BY pollfreq') else: #polling all self._db_execute(self._c, 'SELECT rowid FROM feeds ORDER BY pollfreq') data=self._c.fetchall() if data: feeds = [row[0] for row in data] else: self.polling_callback((-1, [], 0), False) return 0 #don't renice on hildon because we can't renice #back down to zero again #if not utils.RUNNING_HILDON: # os.nice(2) threadcount = 5 if utils.RUNNING_HILDON or utils.RUNNING_SUGAR: threadcount = 2 pool = ThreadPool.ThreadPool(threadcount,"ptvDB", lucene_compat = utils.HAS_LUCENE) self._parse_list = [] for feed in feeds: if self._cancel_poll_multiple or self._exiting: break self._db_execute(self._c, u'SELECT feed_pointer FROM feeds WHERE rowid=?',(feed,)) result = self._c.fetchone()[0] if result >= 0: self._parse_list.append((feed, arguments, len(feeds), -2)) continue self._db_execute(self._c, """SELECT url,etag FROM feeds WHERE rowid=?""",(feed,)) data = self._c.fetchone() pool.queueTask(self._pool_poll_feed,(feed,arguments,len(feeds), data),self._poll_mult_cb) polled = 0 total = 0 #grow the cache while we do this operation #self._db_execute(self._c, 'PRAGMA cache_size=6000') while polled < len(feeds): if self._cancel_poll_multiple or self._exiting: break if len(self._parse_list) > 0: polled+=1 feed_id, args, total, parsed = self._parse_list.pop(0) self.polling_callback(self._process_feed(feed_id, args, total, parsed)) gc.collect() time.sleep(.1) #self._db_execute(self._c, 'PRAGMA cache_size=2000') if self._cancel_poll_multiple: self._parse_list = [] #pass dummy poll result, send cancel signal self.polling_callback((-1, [], total), True) else: # no need for manual join while pool.getTaskCount()>0: #manual joinAll so we can check for exit if self._exiting: pool.joinAll(False, True) del pool self._c.close() self._db.close() #if not utils.RUNNING_HILDON: # os.nice(-2) return total time.sleep(.5) pool.joinAll(False,True) #just to make sure I guess del pool self.reindex() if not self._exiting: self.cache_images() self._cancel_poll_multiple = False gc.collect() #if not utils.RUNNING_HILDON: # os.nice(-2) return total def interrupt_poll_multiple(self): self._cancel_poll_multiple = True def _poll_mult_cb(self, args): feed_id, args, total, parsed = args self._parse_list.append((feed_id, args, total, parsed)) def _pool_poll_feed(self, args): feed_id, arguments, total, data = args url,etag=data #save ram by not piling up polled data if utils.RUNNING_SUGAR or utils.RUNNING_HILDON: parse_list_limit = 10 else: parse_list_limit = 50 while len(self._parse_list) > parse_list_limit and not self._exiting: time.sleep(1) if self._exiting: return (feed_id, arguments, total, -1) try: import feedparser #feedparser.disableWellFormedCheck=1 #do we still need this? it used to cause crashes #speed up feedparser #must sanitize because some feeds have POPUPS! if utils.RUNNING_SUGAR: #feedparser._sanitizeHTML = lambda a, b: a feedparser._resolveRelativeURIs = lambda a, b, c: a if arguments & A_IGNORE_ETAG == A_IGNORE_ETAG: data = feedparser.parse(url) else: data = feedparser.parse(url,etag) return (feed_id, arguments, total, data) except Exception, e: logging.error(str(e)) return (feed_id, arguments, total, -1) def _process_feed(self,feed_id, args, total, data, recurse=0): """a wrapper function that returns the index along with the result so we can sort. 
Each poller needs its own db connection for locking reasons""" self._db_execute(self._c, u'SELECT lastpoll FROM feeds WHERE rowid=?', (feed_id,)) last_poll_time = self._c.fetchone()[0] poll_arguments = 0 result = 0 try: #poll_arguments = args[1] if self._exiting: return (feed_id,{'ioerror':None, 'pollfail':False}, total) result, new_entryids, mod_entryids = self.poll_feed(feed_id, args | A_POOLED_POLL, preparsed=data) if self._exiting: return (feed_id,{'ioerror':None, 'pollfail':False}, total) except sqlite.OperationalError, e: logging.warning("Database warning..." + str(e)) if recurse < 2: time.sleep(5) logging.warning("trying again...") self._db.close() self._db=sqlite.connect(os.path.join(self.home,"penguintv4.db"), timeout=30, isolation_level="IMMEDIATE") self._c = self._db.cursor() return self._process_feed(feed_id, args, total, data, recurse+1) #and reconnect logging.warning("can't get lock, giving up") return (feed_id,{'pollfail':True}, total) except FeedPollError,e: #print "feed poll error", logging.warning(str(e)) return (feed_id,{'pollfail':True}, total) except IOError, e: #print "io error", logging.warning(str(e)) #we got an ioerror, but we won't take it out on the feed return (feed_id,{'ioerror':e, 'pollfail':False}, total) except: logging.warning("other error polling feed:" + str(feed_id)) exc_type, exc_value, exc_traceback = sys.exc_info() error_msg = "" for s in traceback.format_exception(exc_type, exc_value, exc_traceback): error_msg += s logging.error(error_msg) return (feed_id,{'pollfail':True}, total) #assemble our handy dictionary while we're in a thread update_data={} if result > 0: update_data['first_poll'] = last_poll_time == 0 update_data['new_entries'] = result update_data['new_entryids'] = new_entryids update_data['mod_entryids'] = mod_entryids if self.is_feed_filter(feed_id): entries = self.get_entrylist(feed_id) #reinitialize filtered_entries dict update_data['unread_count'] = self.get_unread_count(feed_id) flag_list = self.get_entry_flags(feed_id) update_data['pollfail']=self.get_feed_poll_fail(self._resolve_pointed_feed(feed_id)) else: self._db_execute(self._c, u'SELECT read FROM entries WHERE feed_id=?',(feed_id,)) list = self._c.fetchall() update_data['unread_count'] = len([item for item in list if item[0]==0]) update_data['entry_count'] = len(list) flag_list = self.get_entry_flags(feed_id) if len(self.get_pointer_feeds(feed_id)) > 0: logging.info("have pointers, reindexing now") self.reindex() update_data['flag_list']=flag_list update_data['pollfail']=False update_data['no_changes'] = False elif result == 0: flag_list = self.get_entry_flags(feed_id) update_data['flag_list']=flag_list update_data['pollfail'] = False update_data['no_changes'] = True update_data['first_poll'] = False return (feed_id, update_data, total) def poll_feed_trap_errors(self, feed_id, callback): try: feed={} self._db_execute(self._c, "SELECT title,url FROM feeds WHERE rowid=?",(feed_id,)) result = self._c.fetchone() feed['feed_id']=feed_id feed['url']=result[1] feed['new_entries'], feed['new_entryids'], feed['mod_entryids'] = \ self.poll_feed(feed_id, A_IGNORE_ETAG+A_DO_REINDEX) callback(feed, True) except Exception, e:#FeedPollError,e: logging.warning(str(e)) logging.warning("error polling feed:") exc_type, exc_value, exc_traceback = sys.exc_info() error_msg = "" for s in traceback.format_exception(exc_type, exc_value, exc_traceback): error_msg += s logging.warning(error_msg) self.reindex() callback(feed, False) def _polling_callback(self, data, cancelled=False): print "look a 
callback" print data def poll_feed(self, feed_id, arguments=0, preparsed=None): """polls a feed and returns the number of new articles and a flag list. Optionally, one can pass a feedparser dictionary in the preparsed argument and avoid network operations""" def perform_feed_updates(updates, f_id): if not updates.has_key('pollfail'): updates['pollfail'] = 0 #logging.debug("setting pollfail to %i for %i" % (updates['pollfail'], f_id)) updated_fields = ", ".join(["%s=?" % k for k in updates.keys()]) updated_values = tuple([updates[k] for k in updates.keys()]) self._db_execute(self._c, u"""UPDATE feeds SET %s WHERE rowid=?""" % updated_fields, updated_values + (feed_id,)) self._db.commit() self._db_execute(self._c, u'SELECT feed_pointer, url, etag, image, title, link, flags, lastpoll, newatlast, pollfreq FROM feeds WHERE rowid=?', (feed_id,)) result = self._c.fetchone() feed = {} feed['feed_id'] = feed_id feed['feed_pointer'] = result[0] feed['url'] = result[1] feed['etag'] = result[2] feed['image'] = result[3] feed['title'] = result[4] feed['link'] = result[5] feed['flags'] = result[6] feed['last_time'] = result[7] feed['netatlast'] = result[8] feed['old_poll_freq'] = result[9] if preparsed is None: #feed_id = self._resolve_pointed_feed(feed_id) #self._db_execute(self._c, u'SELECT feed_pointer FROM feeds WHERE rowid=?',(feed_id,)) #result =self._c.fetchone() #if result: if feed['feed_pointer'] >= 0: return 0, [], [] #self._db_execute(self._c, """SELECT url,etag FROM feeds WHERE rowid=?""",(feed_id,)) #data = self._c.fetchone() try: import feedparser #feedparser.disableWellFormedCheck=1 #do we still need this? it used to cause crashes #speed up feedparser if utils.RUNNING_SUGAR or utils.RUNNING_HILDON: #feedparser._sanitizeHTML = lambda a, b: a feedparser._resolveRelativeURIs = lambda a, b, c: a if arguments & A_IGNORE_ETAG == A_IGNORE_ETAG: data = feedparser.parse(feed['url']) else: data = feedparser.parse(feed['url'], feed['etag']) except Exception, e: feed_updates = {} if arguments & A_AUTOTUNE == A_AUTOTUNE: feed_updates = self._set_new_update_freq(feed, 0) logging.warning("feedparser exception: %s" % str(e)) feed_updates['pollfail'] = 1 #self._db_execute(self._c, """UPDATE feeds SET pollfail=1 WHERE rowid=?""",(feed_id,)) #self._db.commit() perform_feed_updates(feed_updates, feed_id) logging.warning(str(e)) raise FeedPollError,(feed_id,"feedparser blew a gasket") else: if preparsed == -1: feed_updates = {} if arguments & A_AUTOTUNE == A_AUTOTUNE: feed_updates = self._set_new_update_freq(feed, 0) logging.warning("bad preparsed") feed_updates['pollfail'] = 1 #self._db_execute(self._c, """UPDATE feeds SET pollfail=1 WHERE rowid=?""",(feed_id,)) #self._db.commit() perform_feed_updates(feed_updates, feed_id) raise FeedPollError,(feed_id,"feedparser blew a gasket") elif preparsed == -2: #print "pointer feed, returning 0" return 0, [], [] else: #print "data is good" #need to get a url from somewhere data = preparsed try: url = data['feed']['title_detail']['base'] except: url = feed_id if data.has_key('status'): if data['status'] == 304: #this means "nothing has changed" feed_updates = {} if arguments & A_AUTOTUNE == A_AUTOTUNE: feed_updates = self._set_new_update_freq(feed, 0) #feed_updates['pollfail'] = 1 #self._db_execute(self._c, """UPDATE feeds SET pollfail=1 WHERE rowid=?""",(feed_id,)) #self._db.commit() perform_feed_updates(feed_updates, feed_id) return 0, [], [] if data['status'] == 404: #whoops feed_updates = {} if arguments & A_AUTOTUNE == A_AUTOTUNE: feed_updates = 
self._set_new_update_freq(feed, 0) feed_updates['pollfail'] = 1 #self._db_execute(self._c, """UPDATE feeds SET pollfail=1 WHERE rowid=?""",(feed_id,)) #self._db.commit() perform_feed_updates(feed_updates, feed_id) raise FeedPollError,(feed_id,"404 not found: "+str(url)) if len(data['feed']) == 0 or len(data['items']) == 0: #print data if data.has_key('bozo_exception'): if isinstance(data['bozo_exception'], URLError): e = data['bozo_exception'][0] #logging.debug(str(e)) errno = e[0] if errno in (#-2, # Name or service not known -3, #failure in name resolution 101, #Network is unreachable 114, #Operation already in progress 11): #Resource temporarily unavailable raise IOError(e) elif errno == -2: #could be no site, could be no internet try: #this really should work, right? #fixme: let's find a real way to test internet, hm? u = urllib.urlretrieve("http://www.google.com") except IOError, e2: raise IOError(e) feed_updates = {} if arguments & A_AUTOTUNE == A_AUTOTUNE: feed_updates = self._set_new_update_freq(feed, 0) feed_updates['pollfail'] = 1 #self._db_execute(self._c, """UPDATE feeds SET pollfail=1 WHERE rowid=?""",(feed_id,)) #self._db.commit() perform_feed_updates(feed_updates, feed_id) #logging.debug("empty: %s" % str(data)) raise FeedPollError,(feed_id,"empty feed") #else... feed_updates = {} #see if we need to get an image if not self._icon_manager.icon_exists(feed_id): href = self._icon_manager.download_icon(feed_id, data) if href is not None: #self._db_execute(self._c, u"""UPDATE feeds SET image=? WHERE rowid=?""",(href,feed_id)) feed_updates['image'] = href else: #self._db_execute(self._c, u"""SELECT image FROM feeds WHERE rowid=?""",(feed_id,)) #try: old_href = self._c.fetchone()[0] #except: old_href = "" if not self._icon_manager.is_icon_up_to_date(feed_id, feed['image'], data): self._icon_manager.remove_icon(feed_id) href = self._icon_manager.download_icon(feed_id, data) if href is not None: #self._db_execute(self._c, u"""UPDATE feeds SET image=? WHERE rowid=?""",(href,feed_id)) feed_updates['image'] = href if arguments & A_DELETE_ENTRIES == A_DELETE_ENTRIES: logging.info("deleting existing entries" + str(feed_id) + str(arguments)) self._db_execute(self._c, """DELETE FROM entries WHERE feed_id=?""",(feed_id,)) #self._db.commit() #to discover the old entries, first we mark everything as old #later, we well unset this flag for everything that is NEW, #MODIFIED, and EXISTS. anything still flagged should be deleted #self._db_execute(self._c, """UPDATE entries SET old=1 WHERE feed_id=?""",(feed_id,)) feed_updates['pollfail'] = 0 #self._db_execute(self._c, """UPDATE feeds SET pollfail=0 WHERE rowid=?""",(feed_id,)) #normalize results channel = data['feed'] if channel.has_key('description') == 0: channel['description']="" if len(channel['description']) > 128: channel['description'] = channel['description'][0:127] channel['description']=self._encode_text(channel['description']) if channel.has_key('title') == 0: if channel['description'] != "": channel['title']=channel['description'] else: channel['title']=url channel['title'] = self._encode_text(channel['title']) #print channel['title'] if not data.has_key('etag'): data['etag']='0' #if not data.has_key('modified'): # modified='0' #else: # modified = int(time.mktime(data['modified'])) try: #self._db_execute(self._c, u'SELECT title FROM feeds WHERE rowid=?',(feed_id,)) #exists=self._c.fetchone() if len(feed['title'])>4: #self._db_execute(self._c, """UPDATE feeds SET description=?, modified=?, etag=? 
WHERE rowid=?""", (channel['description'], modified,data['etag'],feed_id)) if feed['title'][0:4] == "http": #hack to detect when the title hasn't been set yet because of first poll feed_updates['title'] = channel['title'] #self._db_execute(self._c, """UPDATE feeds SET title=?, description=?, modified=?, etag=? WHERE rowid=?""", (channel['title'],channel['description'], modified,data['etag'],feed_id)) elif len(feed['title'])>0: #don't change title #self._db_execute(self._c, """UPDATE feeds SET description=?, modified=?, etag=? WHERE rowid=?""", (channel['description'], modified,data['etag'],feed_id)) if feed['title'] is None: feed_updates['title'] = channel['title'] #self._db_execute(self._c, """UPDATE feeds SET title=?, description=?, modified=?, etag=? WHERE rowid=?""", (channel['title'],channel['description'], modified,data['etag'],feed_id)) else: feed_updates['title'] = channel['title'] feed_updates['description'] = channel['description'] feed_updates['etag'] = data['etag'] #self._db_execute(self._c, """UPDATE feeds SET title=?, description=?, etag=? WHERE rowid=?""", (channel['title'],channel['description'], data['etag'],feed_id)) self._reindex_feed_list.append(feed_id) feed_updates['description'] = channel['description'] feed_updates['etag'] = data['etag'] except Exception, e: logging.warning(str(e)) feed_updates['pollfail'] = 1 #self._db_execute(self._c, """UPDATE feeds SET pollfail=1 WHERE rowid=?""",(feed_id,)) perform_feed_updates(feed_updates, feed_id) raise FeedPollError,(feed_id,"error updating title and description of feed") #self._db_execute(self._c, u'SELECT link FROM feeds WHERE rowid=?',(feed_id,)) #link = self._c.fetchone() #if link is not None: # link = link[0] #if there was no result, or result is None, it's blank if feed['link'] is None: feed['link'] = "" if feed['link'] == "" and data['feed'].has_key('link'): feed_updates['link'] = data['feed']['link'] #self._db_execute(self._c, u'UPDATE feeds SET link=? WHERE rowid=?',(data['feed']['link'],feed_id)) #self._db.commit() #populate the entries #only look as far back as 1000% for existing entries #existing_limit = int(len(data['items']) * 10) #print "only checking", existing_limit self._db_execute(self._c, #"""SELECT rowid,guid,link,title,description FROM entries WHERE feed_id=? ORDER BY fakedate DESC LIMIT %i""" % existing_limit, """SELECT rowid,guid,link,title,description,hash FROM entries WHERE feed_id=? ORDER BY fakedate DESC""", (feed_id,)) existing_entries = self._c.fetchall() #logging.debug("existing entries: %i" % len(existing_entries)) #print "got", len(existing_entries) #only use GUID if there are no dupes -- thanks peter's feed >-( guid_quality = 0.0 if len(existing_entries) > 0: guids = [e[1] for e in existing_entries] guids.sort() if len(guids[0]) > 2: #too short to be valuable prev_g = guids[0] dupe_count = 0.0 for g in guids[1:50]: #up to first 50 is fine if g == prev_g: dupe_count += 1.0 prev_g = g guid_quality = 1 - (dupe_count / len(existing_entries)) #we can't trust the dates inside the items for timing data. 
#Bad formats, no dates at all, and timezones screw things up #so I introduce a fake date which works for determining read and #unread article counts, and keeps the articles in order fake_time = int(time.time()) i=0 new_items = 0 flag_list = [] no_delete = [] new_entryids = [] mod_entryids = [] default_read = int(feed['flags'] & FF_MARKASREAD == FF_MARKASREAD) self._db_execute(self._c, u"""SELECT entry_id FROM media WHERE feed_id=?""", (feed_id,)) media_entries = self._c.fetchall() if media_entries is None: media_entries = [] else: media_entries = [r[0] for r in media_entries] #logging.debug("feed has %i items" % len(data['items'])) for item in data['items']: #do a lot of normalizing item['body'] = '' possible_bodies = [] #right now we look in the following places for the body, and take the longest one: #content, description, summary, summary_detail if item.has_key('content'): #ok so peter was right, possible_bodies.append(item['content'][0]['value']) if item.has_key('description'): #content_encoded is where we should be possible_bodies.append(item['description']) if item.has_key('summary'): #or the summary possible_bodies.append(item['summary']) if item.has_key('summary_detail'): possible_bodies.append(item['summary_detail']['value']) if len(possible_bodies): possible_bodies.sort(lambda x,y: len(y)-len(x)) item['body'] = possible_bodies[0] item['body']=self._encode_text(item['body']) if item['body'].count('<') > 5: #probably encoded body item['body'] = utils.html_entity_unfixer(item['body']) if item.has_key('title') == 0: item['title'] = "" if item['title']=="": item['title']=item['description'][0:100] html_begin = string.find(item['title'],'<') if html_begin >= 0 and html_begin < 5: #in case it _begins_ with html, and the html is really early #p = utils.StrippingParser() #p.feed(item['description']) ##p.cleanup() #p.close() #item['title']=p.result[0:35] desc = item['description'] #hack for hullabaloo desc = desc[:desc.find(" 5: #in case there's html within 100 chars... item['title']=item['title'][0:html_begin-1] #strip #things mess up if a title ends in a space, so strip trailing spaces #doublecheck if len(item['title'])==0: item['title']='untitled' else: item['title'] = item['title'].strip() try: #p = utils.StrippingParser() #p.feed(item['title']) ##p.cleanup() #p.close() #item['title'] = p.result item['title'] = STRIPPER_REGEX.sub('', item['title']) except: pass #let actual entities through, but correct unadorned &s. 
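# --- illustrative sketch (not part of PenguinTV): the bare-& heuristic ---
# The regex logic just below assumes a '&' that reaches a ';' within about
# ten characters is a real entity and leaves it alone; any other '&' gets
# escaped to &amp;.  Standalone version (the function name is an assumption):
import re

_ENTITY = re.compile('&[^;]+;')

def escape_bare_ampersands(title):
	m = _ENTITY.search(title)
	if m is not None and (m.end() - m.start()) <= 10:
		return title			#looks like a genuine entity, trust it
	return title.replace('&', '&amp;')	#bare ampersand: escape it
# --- end sketch ---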
#thanks to http://www.regular-expressions.info/repeat.html#greedy #<[^>]+> -> &[^;]+; #I wrote: &.+?; which didn't work (matched widest results-- see reference) m = re.compile('&[^;]+;').search(item['title']) if m is not None: #entity found span = m.span() if span[1]-span[0] > 10: #unlikely to be an entity item['title'] = re.sub('&','&',item['title']) #else let it pass else: item['title'] = re.sub('&','&',item['title']) if type(item['body']) is str: item['body'] = unicode(item['body'],'utf-8') for uni in _common_unicode.keys(): item['body'] = item['body'].replace(uni, _common_unicode[uni]) item['title'] = self._encode_text(item['title']) for uni in _common_unicode.keys(): item['title'] = item['title'].replace(uni, _common_unicode[uni]) if item.has_key('creator') == 0: item['creator']="" if item.has_key('author') == 1: item['creator']=item['author'] if item.has_key('guid') == 0: item['id']=0 item['guid']='0' if item.has_key('link') == 0: item['link'] = "" item['creator']=self._encode_text(item['creator']) #blow away date_parsed with more recent times if item.has_key('updated_parsed'): item['date_parsed'] = item['updated_parsed'] elif item.has_key('modified_parsed'): item['date_parsed'] = item['modified_parsed'] elif item.has_key('created_parsed'): item['date_parsed'] = item['created_parsed'] elif item.has_key('update_parsed'): item['date_parsed'] = item['update_parsed'] if not item.has_key('date_parsed') or item['date_parsed'] is None: item['date_parsed']=time.localtime() entry_hash = self._get_hash(item['guid'], item['title'], item['body']) status = self._get_status(item, entry_hash, existing_entries, guid_quality, media_entries) if status[0]==NEW: new_items += 1 self._db_execute(self._c, u'INSERT INTO entries (feed_id, title, creator, description, read, fakedate, date, guid, link, keep, hash) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, 0, ?)', (feed_id,item['title'],item['creator'],item['body'], default_read,fake_time-i, int(time.mktime(item['date_parsed'])), item['guid'],item['link'], entry_hash)) self._db_execute(self._c, "SELECT last_insert_rowid()") entry_id = self._c.fetchone()[0] if item.has_key('enclosures'): for media in item['enclosures']: media.setdefault('length', 0) media.setdefault('type', 'application/octet-stream') self._db_execute(self._c, u"""INSERT INTO media (entry_id, url, mimetype, download_status, viewed, keep, length, feed_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?)""", (entry_id, media['url'], media['type'], D_NOT_DOWNLOADED, default_read, 0, media['length'], feed_id)) self._reindex_entry_list.append(entry_id) self._image_cache_list.append(entry_id) no_delete.append(entry_id) new_entryids.append(entry_id) elif status[0]==EXISTS: entry_id = status[1] no_delete.append(entry_id) elif status[0]==MODIFIED: entry_id = status[1] self._db_execute(self._c, u'UPDATE entries SET title=?, creator=?, description=?, date=?, guid=?, link=?, hash=? WHERE rowid=?', (item['title'],item['creator'],item['body'], int(time.mktime(item['date_parsed'])),item['guid'], item['link'], entry_hash, entry_id)) if self.entry_flag_cache.has_key(entry_id): del self.entry_flag_cache[entry_id] if item.has_key('enclosures'): #self._db_execute(self._c, u'SELECT url FROM media WHERE entry_id=? AND (download_status=? 
OR download_status=?)', # (entry_id,D_NOT_DOWNLOADED,D_ERROR)) self._db_execute(self._c, u'SELECT url FROM media WHERE entry_id=?', (entry_id,)) db_enc = self._c.fetchall() db_enc = [c_i[0] for c_i in db_enc] f_enc = [f_i['url'] for f_i in item['enclosures']] db_set = set(db_enc) f_set = set(f_enc) removed = list(db_set.difference(f_set)) added = list(f_set.difference(db_set)) if len(removed)>0: qmarks = "?,"*(len(removed)-1)+"?" self._db_execute(self._c, u'DELETE FROM media WHERE url IN ('+qmarks+') AND (download_status=? OR download_status=?)', tuple(removed)+(D_NOT_DOWNLOADED,D_ERROR)) #need to delete media that isn't in enclosures only and is not downloaded #need to add media that's in enclosures but not in db after that process if len(added) > 0: for media in item['enclosures']: #add the rest if media['url'] in added: #if dburl[0] != media['url']: #only add if that url doesn't exist media.setdefault('length', 0) media.setdefault('type', 'application/octet-stream') self._db_execute(self._c, u"""INSERT INTO media (entry_id, url, mimetype, download_status, viewed, keep, length, download_date, feed_id) VALUES (?, ?, ?, ?, ?, ?, ?, 0, ?)""", (entry_id, media['url'], media['type'], D_NOT_DOWNLOADED, default_read, 0, media['length'], feed_id)) self._db_execute(self._c, u'UPDATE entries SET read=0 WHERE rowid=?', (entry_id,)) self._reindex_entry_list.append(entry_id) self._image_cache_list.append(entry_id) no_delete.append(entry_id) mod_entryids.append(entry_id) i+=1 #don't call anything old that has media... self._db_execute(self._c, """SELECT entry_id FROM media WHERE download_status>0 AND feed_id=?""",(feed_id,)) result = self._c.fetchall() if result: #combine with EXISTing entries no_delete += [r[0] for r in result] # anything not set above as new, mod, or exists is no longer in # the xml and therefore could be deleted if we have more articles than # the limit self._db_execute(self._c, """SELECT count(*) FROM entries WHERE feed_id=?""",(feed_id,)) all_entries = self._c.fetchone()[0] nokeepdeleted = int(feed['flags'] & FF_NOKEEPDELETED == FF_NOKEEPDELETED) if nokeepdeleted: if len(no_delete) > 0: qmarks = "?,"*(len(no_delete)-1)+"?" self._db_execute(self._c, """DELETE FROM entries WHERE rowid NOT IN (%s) AND keep=0 AND feed_id=?""" % qmarks, tuple(no_delete) + (feed_id,)) ditchables = self._c.fetchall() else: self._db_execute(self._c, """DELETE FROM entries WHERE keep=0 AND feed_id=?""", (feed_id,)) ditchables = self._c.fetchall() elif MAX_ARTICLES > 0: if all_entries > MAX_ARTICLES: if len(no_delete) > 0: qmarks = "?,"*(len(no_delete)-1)+"?" self._db_execute(self._c, """SELECT rowid FROM entries WHERE rowid NOT IN (%s) AND keep=0 AND feed_id=? ORDER BY fakedate LIMIT ?""" % qmarks, tuple(no_delete) + (feed_id, all_entries - MAX_ARTICLES)) ditchables = self._c.fetchall() else: self._db_execute(self._c, """SELECT rowid FROM entries WHERE keep=0 AND feed_id=? ORDER BY fakedate LIMIT ?""", (feed_id, all_entries - MAX_ARTICLES)) ditchables = self._c.fetchall() if ditchables is not None: if len(ditchables) > 0: ditchables = tuple([r[0] for r in ditchables]) qmarks = "?,"*(len(ditchables)-1)+"?" 
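# --- illustrative sketch (not part of PenguinTV): the qmarks idiom ---
# sqlite cannot bind a Python list to a single placeholder, so this module
# builds an IN (...) clause with one '?' per value, as in the DELETE just
# below.  Standalone demonstration (the table name is trusted internal input;
# the values themselves are bound safely):
import sqlite3

def delete_rows(conn, table, rowids):
	if not rowids:
		return
	qmarks = ",".join("?" * len(rowids))	#e.g. "?,?,?" for three ids
	conn.execute("DELETE FROM %s WHERE rowid IN (%s)" % (table, qmarks),
		tuple(rowids))
	conn.commit()
# --- end sketch ---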
self._db_execute(self._c, """DELETE FROM entries WHERE rowid IN (%s)""" % qmarks, ditchables) for e_id in ditchables: self._image_uncache_list.append(e_id) #delete pre-poll entry if feed['last_time'] == 0: self._db_execute(self._c, "DELETE FROM entries WHERE fakedate=0 AND feed_id=?",(feed_id,)) if arguments & A_AUTOTUNE == A_AUTOTUNE: result = self._set_new_update_freq(feed, new_items) feed_updates.update(result) else: cur_time = int(time.time()) feed_updates['lastpoll'] = cur_time #self._db_execute(self._c, u'UPDATE feeds SET lastpoll=? WHERE rowid=?',(cur_time,feed_id)) perform_feed_updates(feed_updates, feed_id) if arguments & A_POOLED_POLL == 0: if arguments & A_DO_REINDEX: if new_items > 0: self.reindex() self.cache_images() return (new_items, new_entryids, mod_entryids) def _set_new_update_freq(self, feed, new_items): """Based on previous feed history and number of items found, adjust the polling frequency. The goal is one item per poll. Right now the algorithm is: find new items per poll period. if it's zero (didn't find anything): increase the poll frequency by ratio of average polltime to our previous frequency if it's >1: set poll freq to now-last_poll / new_items_per_poll_period (ie if we got 1.5 items this past period, set poll freq to old_freq/1.5 if it's 1: jackpot, do nothing updates are never more often than 30 mins and never rarer than 4 hours """ feed_updates = {} #should never be called on a filtered feed cur_time = int(time.time()) #this could suck if the program was just started, so only do it if the poll_freq seems correct #however still update the db with the poll time feed_updates['lastpoll'] = cur_time feed_updates['newatlast'] = new_items if cur_time - feed['last_time'] < feed['old_poll_freq']/2: #too soon to get a good reading. 
return feed_updates #normalize dif: new_items = round(new_items * feed['old_poll_freq'] / (cur_time- feed['last_time'])) if new_items==0: #figure out the average time between article postings #this algorithm seems to be the most accurate based on my own personal judgment self._db_execute(self._c, 'SELECT date FROM entries WHERE feed_id=?',(feed['feed_id'],)) datelist = self._c.fetchall() datelist.append((int(time.time()),)) #helps in some cases to pretend we found one now i=0 list=[] for item in datelist[:-1]: diff=abs(datelist[i+1][0]-datelist[i][0]) list.append(diff) i=i+1 if len(list)>0: avg = sum(list)/len(list) else: avg=0 #increase the poll frequency by ratio of average polltime to our previous frequency modifier = avg / feed['old_poll_freq'] poll_freq = round(feed['old_poll_freq'] + modifier*60) elif new_items>1: poll_freq = floor((cur_time - feed['last_time']) / new_items) else: return feed_updates if poll_freq > 21600: #four hours poll_freq = 21600 if poll_freq < 1800: #30 mins poll_freq = 1800 feed_updates['pollfreq'] = poll_freq return feed_updates def _get_status(self, item, new_hash, existing_entries, guid_quality, media_entries): """returns status, the entry_id of the matching entry (if any), and the media list if unmodified""" ID=0 GUID=1 LINK=2 TITLE=3 BODY=4 HASH=5 entry_id=-1 t_item = {'guid': item['guid'], 'body': item['body'], 'link': item['link'], 'title': item['title']} #debug_i = 0 for entry_item in existing_entries: if guid_quality > 0.7: if str(entry_item[GUID]) == str(t_item['guid']): entry_id = entry_item[ID] old_hash = entry_item[HASH] #logging.debug("found match at %i (%f)" % (debug_i, debug_i / float(len(existing_entries)))) break elif guid_quality > 0.1: if str(entry_item[GUID]) == str(t_item['guid']): if entry_item[TITLE] == t_item['title']: entry_id = entry_item[ID] old_hash = entry_item[HASH] #logging.debug("found match at %i (%f)" % (debug_i, debug_i / float(len(existing_entries)))) break elif t_item['link'] != '': if entry_item[LINK] == t_item['link']: if entry_item[TITLE] == t_item['title']: entry_id = entry_item[ID] old_hash = entry_item[HASH] #logging.debug("found match at %i (%f)" % (debug_i, debug_i / float(len(existing_entries)))) break elif entry_item[BODY] == t_item['body']: entry_id = entry_item[ID] old_hash = entry_item[HASH] #logging.debug("found match at %i (%f)" % (debug_i, debug_i / float(len(existing_entries)))) break elif entry_item[TITLE] == t_item['title']: entry_id = entry_item[ID] old_hash = entry_item[HASH] #logging.debug("found match at %i (%f)" % (debug_i, debug_i / float(len(existing_entries)))) break elif entry_item[BODY] == t_item['body']: entry_id = entry_item[ID] old_hash = entry_item[HASH] #logging.debug("found match at %i (%f)" % (debug_i, debug_i / float(len(existing_entries)))) break #debug_i += 1 if entry_id == -1: return (NEW, -1, []) if new_hash == old_hash: #now check enclosures if entry_id not in media_entries: old_media = [] else: old_media = self.get_entry_media(entry_id) #if they are both zero, return if len(old_media) == 0 and item.has_key('enclosures') == False: return (EXISTS,entry_id, []) if item.has_key('enclosures'): #if lengths are different, return if len(old_media) != len(item['enclosures']): return (MODIFIED,entry_id, []) else: #if we had some, and now don't, return if len(old_media)>0: return (MODIFIED,entry_id, []) #we have two lists of the same, non-zero length #only now do we do the loops and sorts -- we need to test individual items existing_media = old_media old_media = 
[urlparse.urlparse(medium['url'])[:3] for medium in old_media] new_media = [urlparse.urlparse(m['url'])[:3] for m in item['enclosures']] old_media = utils.uniquer(old_media) old_media.sort() new_media = utils.uniquer(new_media) new_media.sort() if old_media != new_media: return (MODIFIED,entry_id,[]) return (EXISTS,entry_id, existing_media) else: #logging.debug("entry is modified") return (MODIFIED,entry_id, []) def get_entry_media(self, entry_id): self._db_execute(self._c, """SELECT rowid,entry_id,url,file,download_status,viewed,length,mimetype,errormsg FROM media WHERE entry_id = ? ORDER BY entry_id DESC""",(entry_id,)) dataList=self._c.fetchall() if dataList is None: return [] media_list=[] for datum in dataList: medium={} medium['url']=datum[2] #MAGIC medium['download_status']=int(datum[4]) #MAGIC try: medium['size']=int(datum[6]) #MAGIC except: medium['size']=0 medium['media_id']=int(datum[0]) #MAGIC medium['file']=datum[3] #MAGIC medium['entry_id']=datum[1] #MAGIC medium['viewed']=int(datum[5]) #MAGIC medium['mimetype']=datum[7] #MAGIC medium['errormsg']=datum[8] #MAGIC media_list.append(medium) return media_list def get_entry_media_block(self, entry_list): if len(entry_list) == 0: return qmarks = "?,"*(len(entry_list)-1)+"?" self._db_execute(self._c, """SELECT rowid,entry_id,url,file,download_status,viewed,length,mimetype,errormsg FROM media WHERE entry_id in ("""+qmarks+') ORDER BY download_date DESC',tuple(entry_list)) result = self._c.fetchall() if result is None: return [] media_dict = {} for datum in result: medium={} medium['url']=datum[2] #MAGIC medium['download_status']=int(datum[4]) #MAGIC try: medium['size']=int(datum[6]) #MAGIC except: medium['size']=0 medium['media_id']=int(datum[0]) #MAGIC medium['file']=datum[3] #MAGIC medium['entry_id']=datum[1] #MAGIC medium['viewed']=int(datum[5]) #MAGIC medium['mimetype']=datum[7] #MAGIC medium['errormsg']=datum[8] #MAGIC if not media_dict.has_key(medium['entry_id']): media_dict[medium['entry_id']] = [medium] else: media_dict[medium['entry_id']].append(medium) return media_dict def get_media(self, media_id): self._db_execute(self._c, u'SELECT url, download_status, length, file, entry_id, viewed, mimetype, feed_id, errormsg FROM media WHERE rowid=?',(media_id,)) datum=self._c.fetchone() if datum is None: return None medium={} medium['url']=datum[0] #MAGIC medium['download_status']=int(datum[1]) #MAGIC try: medium['size']=int(datum[2]) #MAGIC except: pass medium['media_id']=media_id medium['file']=datum[3] #MAGIC medium['entry_id']=datum[4] #MAGIC medium['viewed']=int(datum[5]) #MAGIC medium['mimetype']=datum[6] #MAGIC medium['feed_id']=datum[7] #MAGIC medium['errormsg']=datum[8] #MAGIC return medium def get_feed_media_count(self, feed_id): self._db_execute(self._c, u'SELECT count(*) FROM media WHERE feed_id=?',(feed_id,)) return self._c.fetchone()[0] def get_entry(self, entry_id, ajax_url=None): self._db_execute(self._c, """SELECT title, creator, link, description, feed_id, date, read, keep, guid, hash FROM entries WHERE rowid=? 
LIMIT 1""",(entry_id,)) result = self._c.fetchone() entry_dic={} try: entry_dic['title'] = result[0] entry_dic['creator'] = result[1] entry_dic['link'] = result[2] entry_dic['description']=result[3] entry_dic['feed_id']= result[4] entry_dic['date'] = result[5] entry_dic['read'] = result[6] entry_dic['keep'] = result[7] entry_dic['guid'] = result[8] entry_dic['hash'] = result[9] entry_dic['entry_id'] = entry_id except TypeError: #this error occurs when feed or item is wrong raise NoEntry, entry_id if self._image_cache is not None: entry_dic['description'] = self._image_cache.rewrite_html(str(entry_id), entry_dic['description'], ajax_url) return entry_dic def get_entry_block(self, entry_list, ajax_url=None): if len(entry_list) == 0: return [] qmarks = "?,"*(len(entry_list)-1)+"?" self._db_execute(self._c, u'SELECT title, creator, link, description, feed_id, date, read, rowid, keep, guid, hash FROM entries WHERE rowid in ('+qmarks+')', (tuple(entry_list))) result = self._c.fetchall() if result is None: return [] retval = [] for entry in result: entry_dic = {} entry_dic['title'] = entry[0] entry_dic['creator'] = entry[1] entry_dic['link'] = entry[2] entry_dic['description']=entry[3] entry_dic['feed_id']= entry[4] entry_dic['date'] = entry[5] entry_dic['read'] = entry[6] entry_dic['entry_id'] = entry[7] entry_dic['keep'] = entry[8] entry_dic['guid'] = entry[9] entry_dic['hash'] = entry[10] if self._image_cache is not None: entry_dic['description'] = self._image_cache.rewrite_html(str(entry_dic['entry_id']), entry_dic['description'], ajax_url) retval.append(entry_dic) return retval def get_entries_since(self, timestamp): self._db_execute(self._c, u'SELECT feed_id, rowid, hash, read FROM entries WHERE fakedate > ?', (timestamp,)) result = self._c.fetchall() if result is None: return [] else: return result def get_kept_entries(self, feed_id): self._db_execute(self._c, u'SELECT rowid FROM entries WHERE keep=1 AND feed_id=?', (feed_id,)) result = self._c.fetchall() if result is None: return [] else: return [r[0] for r in result] def get_filtered_entries(self, feed_index): """Assumes this is a feed pointer""" self._db_execute(self._c, u'SELECT feed_pointer,description FROM feeds WHERE rowid=?',(feed_index,)) result = self._c.fetchone() if result is None: self._filtered_entries[feed_index] = [] return [] if result[0] >= 0: pointed_feed = result[0] #this is where we perform a search s_entries = self.search(result[1],pointed_feed)[1] if len(s_entries)==0: self._filtered_entries[feed_index] = [] return [] s_entries.sort(lambda x,y: int(y[2] - x[2])) entries = [] #gonna be slow :( for entry_id,title, fakedate, feed_id in s_entries: self._db_execute(self._c, """SELECT read FROM entries WHERE rowid=? LIMIT 1""",(entry_id,)) try: readinfo = self._c.fetchone()[0] except: logging.info("error in search results, reindexing") readinfo = 0 self.reindex() entries.append([entry_id, title, fakedate, readinfo, feed_id]) self._filtered_entries[feed_index] = entries return entries else: logging.error("programming error: tried to get filter information from non-filter feed") assert False def get_entrylist(self, feed_index): if self.is_feed_filter(feed_index): return self.get_filtered_entries(feed_index) self._db_execute(self._c, """SELECT rowid,title,fakedate,read,feed_id FROM entries WHERE feed_id=? 
ORDER BY fakedate DESC""",(feed_index,)) result = self._c.fetchall() if result=="": raise NoFeed, feed_index return result def get_first_entry_title(self, feed_id, strip_newlines=False): """returns title of first entry""" if self.is_feed_filter(feed_id): if not self._filtered_entries.has_key(feed_id): self.get_filtered_entries(feed_id) for entry in self._filtered_entries[feed_id]: entry_id, title, fakedate, read, f_id = entry if read == 0: if strip_newlines: return title.replace("\n"," ") return title if len(self._filtered_entries[feed_id]) == 0: return "" if strip_newlines: return self._filtered_entries[feed_id][0][1].replace("\n"," ") return self._filtered_entries[feed_id][0][1] self._db_execute(self._c, """SELECT title FROM entries WHERE feed_id=? ORDER BY fakedate DESC LIMIT 1""",(feed_id,)) result = self._c.fetchone() if result is None: raise NoFeed, feed_id if strip_newlines: return result[0].replace("\n"," ") return result[0] def get_entry_count(self, feed_id): self._db_execute(self._c, u'SELECT count(*) FROM entries WHERE feed_id=?', (feed_id,)) return self._c.fetchone()[0] def get_feedlist(self): self._db_execute(self._c, """SELECT rowid,title,url FROM feeds ORDER BY UPPER(title)""") result = self._c.fetchall() dataList = [] if result: dataList = [list(row) for row in result] else: result=[] return dataList def get_feed_id_by_url(self, url): self._db_execute(self._c, """SELECT rowid FROM feeds WHERE url=?""",(url,)) try: result = self._c.fetchone()[0] except TypeError: return -1 return result def get_feed_title(self, feed_index): self._db_execute(self._c, """SELECT title FROM feeds WHERE rowid=?""",(feed_index,)) try: result = self._c.fetchone()[0] except TypeError: raise NoFeed, feed_index #don't return a tuple return result #self.decode_text(result) def get_feed_image(self, feed_id): self._db_execute(self._c, u'SELECT image FROM feeds WHERE rowid=?', (feed_id,)) try: return self._c.fetchone()[0] except: return None def get_feed_info(self, feed_id): self._db_execute(self._c, """SELECT title, description, url, link, feed_pointer, lastpoll, pollfreq FROM feeds WHERE rowid=?""",(feed_id,)) try: result = self._c.fetchone() d = {'title':result[0], 'description':result[1], 'url':result[2], 'link':result[3], 'feed_pointer':result[4], 'lastpoll':result[5], 'pollfreq':result[6]} parts=urlparse.urlsplit(result[2]) usernameandpassword, domain=urllib.splituser(parts[1]) #username, password=urllib.splitpasswd(usernameandpassword) if usernameandpassword is None: d['auth_feed'] = False else: d['auth_feed'] = True d['auth_userpass'] = usernameandpassword d['auth_domain'] = domain return d except TypeError: raise NoFeed, feed_id return result def set_feed_name(self, feed_id, name): logging.warning("need to rename media dirs to represent new name") name = self._encode_text(name) if name is not None: self._db_execute(self._c, u'UPDATE feeds SET title=? WHERE rowid=?',(name,feed_id)) self._db.commit() else: self._db_execute(self._c, """SELECT url FROM feeds WHERE rowid=?""",(feed_id,)) url=self._c.fetchone()[0] try: import feedparser feedparser.disableWellFormedCheck=1 data = feedparser.parse(url) except: return channel=data['feed'] if channel.has_key('title') == 0: if channel['description'] != "": channel['title']=channel['description'] else: channel['title']=url channel['title'] = self._encode_text(channel['title']) #logging.debug("got title from feed: %s" % channel['title']) self._db_execute(self._c, u'UPDATE feeds SET title=? 
WHERE rowid=?',(channel['title'],feed_id)) self._db.commit() self._reindex_feed_list.append(feed_id) self.reindex() def set_feed_url(self, feed_id, url): try: self._db_execute(self._c, u'UPDATE feeds SET url=? WHERE rowid=?',(url,feed_id)) self._db.commit() except sqlite.IntegrityError: raise FeedAlreadyExists,feed_id def set_feed_link(self, feed_id, link): self._db_execute(self._c, u'UPDATE feeds SET link=? WHERE rowid=?',(link,feed_id)) self._db.commit() def set_media(self, media_id, status=None, filename=None, size=None): assert media_id is not None update_str = u'UPDATE media SET ' update_data = () if status is not None: update_str += u'download_status=?, download_date=?, ' update_data += (status, int(time.time())) if filename is not None: update_str += u'file=?, ' update_data += (filename,) if size is not None: update_str += u'length=?, ' update_data += (int(size),) assert len(update_data) > 0 update_str = update_str[:-2] + u'WHERE rowid=?' update_data += (media_id,) self._db_execute(self._c, update_str, update_data) self._db.commit() def set_media_download_status(self, media_id, status, errormsg=""): errormsg = unicode(errormsg) if status == D_DOWNLOADED: self._db_execute(self._c, u'UPDATE media SET download_status=?, download_date=?, errormsg=? WHERE rowid=?', (status, int(time.time()),errormsg,media_id)) self._db.commit() else: self._db_execute(self._c, u'UPDATE media SET download_status=?, errormsg=? WHERE rowid=?', (status,errormsg,media_id)) self._db.commit() self._db_execute(self._c, u'SELECT entry_id FROM media WHERE rowid=?',(media_id,)) entry_id = self._c.fetchone()[0] if self.entry_flag_cache.has_key(entry_id): del self.entry_flag_cache[entry_id] def set_media_filename(self, media_id, filename): self._db_execute(self._c, u'UPDATE media SET file=? WHERE rowid=?', (filename,media_id)) self._db.commit() def set_media_viewed(self, media_id, viewed, entry_id=None): self._db_execute(self._c, u'UPDATE media SET viewed=? WHERE rowid=?',(int(viewed),media_id)) self._db.commit() if entry_id is None: self._db_execute(self._c, u'SELECT entry_id FROM media WHERE rowid=?',(media_id,)) entry_id = self._c.fetchone()[0] if self.entry_flag_cache.has_key(entry_id): del self.entry_flag_cache[entry_id] if viewed==1:#check to see if this makes the whole entry viewed if self.get_entry_keep(entry_id): return self._db_execute(self._c, u'SELECT viewed FROM media WHERE entry_id=?',(entry_id,)) list = self._c.fetchall() if list: for v in list: if v==0: #still some unviewed return #else self.set_entry_read(entry_id, 1) else: #mark as unviewed by default self.set_entry_read(entry_id, 0) def get_media_size(self, media_id): self._db_execute(self._c, u'SELECT length FROM media WHERE rowid=?',(media_id,)) return self._c.fetchone()[0] def set_media_size(self, media_id, size): self._db_execute(self._c, u'UPDATE media SET length=? WHERE rowid=?',(int(size),media_id)) self._db.commit() def set_entry_read(self, entry_id, read): self._db_execute(self._c, u'UPDATE entries SET read=? WHERE rowid=?',(int(read),entry_id)) self._db_execute(self._c, u'UPDATE media SET viewed=? WHERE entry_id=?',(int(read),entry_id)) self._db.commit() if self.entry_flag_cache.has_key(entry_id): del self.entry_flag_cache[entry_id] def set_entry_keep(self, entry_id, keep): self._db_execute(self._c, u'UPDATE entries SET keep=? 
WHERE rowid=?',(int(keep),entry_id)) if keep: self._db_execute(self._c, u'UPDATE entries SET read=0 WHERE rowid=?',(entry_id,)) self._db_execute(self._c, u'UPDATE media SET viewed=0 WHERE entry_id=?',(entry_id,)) self._db.commit() if self.entry_flag_cache.has_key(entry_id): del self.entry_flag_cache[entry_id] def get_entry_keep(self, entry_id): self._db_execute(self._c, u'SELECT keep FROM entries WHERE rowid=? LIMIT 1',(entry_id,)) retval = self._c.fetchone()[0] return int(retval) def set_entrylist_read(self, entrylist, read): if len(entrylist) == 0: return l = [str(e) for e in entrylist] subset = [] while len(l) > 0: subset = l[:900] qmarks = "?,"*(len(subset)-1)+"?" self._db_execute(self._c, u'UPDATE entries SET read=? WHERE rowid IN ('+qmarks+')', (int(read),)+tuple(subset)) self._db_execute(self._c, u'UPDATE media SET viewed=? WHERE entry_id IN ('+qmarks+')',(int(read),)+tuple(subset)) self._db.commit() l = l[900:] for e in entrylist: if self.entry_flag_cache.has_key(e): del self.entry_flag_cache[e] def get_entry_read(self, entry_id): self._db_execute(self._c, u'SELECT read FROM entries WHERE rowid=? LIMIT 1',(entry_id,)) retval = self._c.fetchone()[0] return int(retval) def clean_media_status(self): self._db_execute(self._c, u'UPDATE media SET download_status=? WHERE download_status<1',(D_NOT_DOWNLOADED,)) self._db_execute(self._c, u'UPDATE media SET download_status=? WHERE download_status=1',(D_RESUMABLE,)) self._db_execute(self._c, u'UPDATE media SET download_status=? WHERE download_status=? AND file is NULL',(D_NOT_DOWNLOADED, D_DOWNLOADED)) self._db_execute(self._c, u'UPDATE media SET errormsg=""') self._db.commit() def get_entryid_for_media(self, media_id): self._db_execute(self._c, u'SELECT entry_id FROM media WHERE rowid=? LIMIT 1',(media_id,)) ret = self._c.fetchone() return ret[0] def get_media_for_download(self, resume_paused = True): if resume_paused: self._db_execute(self._c, u'SELECT rowid, length, entry_id, feed_id FROM media WHERE (download_status=? OR download_status==?) AND viewed=0',(D_NOT_DOWNLOADED,D_RESUMABLE)) else: self._db_execute(self._c, u'SELECT rowid, length, entry_id, feed_id FROM media WHERE download_status=? AND viewed=0',(D_NOT_DOWNLOADED,)) list=self._c.fetchall() self._db_execute(self._c, u'SELECT rowid, length, entry_id, feed_id FROM media WHERE download_status=?',(D_ERROR,)) list=list+self._c.fetchall() newlist=[] for item in list: try: size = int(item[1]) except ValueError: #try _this_! try: size = int(''.join([b for b in item[1] if b.isdigit()])) except: size = 0 new_item = (item[0],size,item[2], item[3]) newlist.append(new_item) if self.entry_flag_cache.has_key(item[2]): del self.entry_flag_cache[item[2]] #build a list of feeds that do not include the noautodownload flag feeds = [l[3] for l in newlist] feeds = utils.uniquer(feeds) good_feeds = [f for f in feeds if self.get_flags_for_feed(f) & FF_NOAUTODOWNLOAD == 0] newlist = [l for l in newlist if l[3] in good_feeds] return newlist def get_deletable_media(self): no_expire = self.get_feeds_for_flag(FF_NOAUTOEXPIRE) if len(no_expire) > 0: qmarks = "?,"*(len(no_expire)-1)+"?" 
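# --- illustrative sketch (not part of PenguinTV): expiry candidate order ---
# The query just below returns downloaded files least worth keeping first:
# viewed media before unviewed, then oldest download dates, skipping kept
# entries and feeds flagged no-auto-expire.  The same ranking in plain Python
# (the dict keys are illustrative assumptions):
def expiry_order(media_rows, protected_feeds):
	"""media_rows: dicts with keep, feed_id, viewed, download_date keys."""
	candidates = [m for m in media_rows
		if not m["keep"] and m["feed_id"] not in protected_feeds]
	#viewed first (DESC), then oldest download_date first (ASC)
	candidates.sort(key=lambda m: (-int(m["viewed"]), m["download_date"]))
	return candidates
# --- end sketch ---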
self._db_execute(self._c, u'SELECT media.rowid, media.entry_id, media.feed_id, media.file, media.download_date FROM media INNER JOIN entries ON media.entry_id = entries.rowid WHERE entries.keep=0 AND media.download_status=2 AND media.feed_id not in ('+qmarks+') ORDER BY media.viewed DESC, media.download_date', tuple(no_expire)) else: self._db_execute(self._c, u'SELECT media.rowid, media.entry_id, media.feed_id, media.file, media.download_date FROM media INNER JOIN entries ON media.entry_id = entries.rowid WHERE entries.keep=0 AND media.download_status=2 ORDER BY media.viewed DESC, media.download_date') result = self._c.fetchall() if result: return [[r[0],r[1],r[2],r[3],long(r[4])] for r in result] return [] def get_resumable_media(self): self._db_execute(self._c, u'SELECT rowid, file, entry_id, feed_id FROM media WHERE download_status=?',(D_RESUMABLE,)) list = self._c.fetchall() dict_list = [] dict = {} for item in list: dict = {} dict['media_id'] = item[0] dict['file'] = item[1] dict['entry_id'] = item[2] dict['feed_id'] = item[3] dict_list.append(dict) return dict_list def mark_feed_as_viewed(self, feed_id): """marks a feed's entries and media as viewed. If there's a way to do this all in sql, I'd like to know""" if self.is_feed_filter(feed_id): if not self._filtered_entries.has_key(feed_id): self.get_filtered_entries(feed_id) changed_list = [] list = [] for entry in self._filtered_entries[feed_id]: self._db_execute(self._c, u'UPDATE entries SET read=1 WHERE rowid=? AND read=0 AND keep=0',(entry[0],)) self._db_execute(self._c, u'SELECT rowid, download_status FROM media WHERE entry_id=?',(entry[0],)) list = list+self._c.fetchall() feed_id = self._resolve_pointed_feed(feed_id) else: #feed_id = self._resolve_pointed_feed(feed_id) self._db_execute(self._c, u'SELECT rowid FROM entries WHERE feed_id=? AND read=0 AND keep=0',(feed_id,)) changed_list = self._c.fetchall() self._db_execute(self._c, u'UPDATE entries SET read=1 WHERE feed_id=? AND read=0 AND keep=0',(feed_id,)) self._db_execute(self._c, u'SELECT media.rowid, media.download_status FROM media INNER JOIN entries ON media.entry_id = entries.rowid WHERE entries.keep=0 AND media.feed_id = ?',(feed_id,)) list = self._c.fetchall() if len(list) > 0: qmarks = "?,"*(len(list)-1)+"?" idlist = [l[0] for l in list] self._db_execute(self._c, u'UPDATE media SET viewed=1 WHERE rowid IN ('+qmarks+')', tuple(idlist)) #for item in list: # self._db_execute(self._c, u'UPDATE media SET viewed=? WHERE rowid=? AND viewed=0',(1,item[0])) # if item[1] == D_ERROR: # self._db_execute(self._c, u'UPDATE media SET download_status=? WHERE rowid=?', (D_NOT_DOWNLOADED,item[0])) self._db.commit() changed_list = [r[0] for r in changed_list] for item in changed_list: if self.entry_flag_cache.has_key(item): del self.entry_flag_cache[item] return changed_list def media_exists(self, filename): self._db_execute(self._c, u'SELECT count(*) FROM media WHERE media.file=?',(filename,)) count = self._c.fetchone()[0] if count>1: logging.warning("multiple entries in db for one filename") if count==0: return False return True def get_unplayed_media(self, set_viewed=False): """media_id, entry_id, feed_id, file, entry_title, feed_title 0 1 2 3 4 5""" self._db_execute(self._c, u'SELECT media.rowid, media.entry_id, media.feed_id, media.file, entries.title FROM media INNER JOIN entries ON media.entry_id = entries.rowid WHERE media.download_status=? 
AND media.viewed=0',(D_DOWNLOADED,)) list=self._c.fetchall() playlist=[] if set_viewed: for item in list: self._db_execute(self._c, u'UPDATE media SET viewed=1 WHERE rowid=?',(item[0],)) self._db_execute(self._c, u'UPDATE entries SET read=1 WHERE rowid=?',(item[1],)) if self.entry_flag_cache.has_key(item[1]): del self.entry_flag_cache[item[1]] playlist.append(item) self._db.commit() else: playlist = list retval = [] for row in playlist: feed_title = self.get_feed_title(row[2]) retval.append(row+(feed_title,)) return retval def pause_all_downloads(self): self._db_execute(self._c, u'SELECT entry_id FROM media WHERE download_status=?',(D_DOWNLOADING,)) list = self._c.fetchall() list = utils.uniquer(list) if list: for e in list: if self.entry_flag_cache.has_key(e[0]): del self.entry_flag_cache[e[0]] self._db_execute(self._c, u'UPDATE media SET viewed = 0 WHERE download_status=?',(D_DOWNLOADING,)) self._db_execute(self._c, u'UPDATE media SET download_status=? WHERE download_status=?',(D_RESUMABLE,D_DOWNLOADING)) self._db.commit() def get_entry_download_status(self, entry_id): self._db_execute(self._c, u'SELECT download_status, viewed FROM media WHERE download_status!=0 AND entry_id=?',(entry_id,)) result = self._c.fetchall() #if entry_id==262: # print result if result: dataList = [list(row) for row in result] else: return 0 for datum in dataList: val = int(datum[0]) if val==D_DOWNLOADING: return D_DOWNLOADING if val==D_ERROR: return D_ERROR if val==D_RESUMABLE: return D_RESUMABLE return D_DOWNLOADED def get_feed_poll_fail(self, feed_id): feed_id = self._resolve_pointed_feed(feed_id) self._db_execute(self._c, u'SELECT pollfail FROM feeds WHERE rowid=?',(feed_id,)) result = self._c.fetchone()[0] if result==0: return False return True def get_feed_download_status(self, feed_id): #feed_id = self._resolve_pointed_feed(feed_id) entrylist = self.get_entrylist(feed_id) for entry in entrylist: status = self.get_entry_download_status(entry[0]) if status!=D_NOT_DOWNLOADED: return status return D_NOT_DOWNLOADED def get_feed_verbose(self, feed_id): """This function is slow, but all of the time is in the execute and fetchall calls. I can't even speed it up if I do my own sort. 
profilers don't lie!""" feed_info = {} #is_filter = False #if utils.HAS_SEARCH: # is_filter = self.is_feed_filter(feed_id) #if is_filter or self.cache_dirty: flaglist = self.get_entry_flags(feed_id) feed_info['important_flag'] = self.get_feed_flag(feed_id, flaglist) #not much speeding up this feed_info['entry_count'] = len(flaglist) feed_info['unread_count'] = len([f for f in flaglist if f & F_UNVIEWED]) #else: # self._db_execute(self._c, u'SELECT flag_cache, unread_count_cache, entry_count_cache FROM feeds WHERE rowid=?',(feed_id,)) # cached_info = self._c.fetchone() # feed_info['important_flag'] = cached_info[0] # feed_info['unread_count'] = cached_info[1] # sfeed_info['entry_count'] = cached_info[2] self._db_execute(self._c, u'SELECT pollfail FROM feeds WHERE rowid=?',(feed_id,)) result = self._c.fetchone()[0] if result==0: feed_info['poll_fail'] = False else: feed_info['poll_fail'] = True return feed_info def get_entry_flag(self, entry_id, medialist=None, read=None, media_entries=None): if self.entry_flag_cache.has_key(entry_id): return self.entry_flag_cache[entry_id] importance=0 if read is None: self._db_execute(self._c, u'SELECT read FROM entries WHERE rowid=?',(entry_id,)) read = self._c.fetchone()[0] if medialist is None: if media_entries is not None: if entry_id not in media_entries: medialist = [] else: medialist = self.get_entry_media(entry_id) else: medialist = self.get_entry_media(entry_id) status = D_NOT_DOWNLOADED if medialist: for medium in medialist: if medium['download_status'] == D_DOWNLOADING: status = D_DOWNLOADING break if medium['download_status'] == D_ERROR: status = D_ERROR break if medium['download_status'] == D_RESUMABLE: status = D_RESUMABLE break if medium['download_status'] == D_DOWNLOADED: status = D_DOWNLOADED break if status == D_ERROR: importance = importance + F_ERROR if status == D_DOWNLOADING: importance = importance + F_DOWNLOADING if medialist: importance = importance + F_MEDIA if status == D_DOWNLOADED: importance = importance + F_DOWNLOADED elif status == D_RESUMABLE: importance = importance + F_PAUSED for medium in medialist: if medium['viewed'] == 0: importance = importance + F_UNVIEWED break else: if int(read) == 0: importance = importance + F_UNVIEWED if USING_FLAG_CACHE: self.entry_flag_cache[entry_id] = importance return importance def get_entry_for_hash(self, e_hash): self._db_execute(self._c, u'SELECT feed_id, rowid FROM entries WHERE hash=?', (e_hash,)) retval = self._c.fetchone() if retval is None: return None, None return retval def get_entries_for_hashes(self, hashlist, read=None): if len(hashlist) == 0: return [] retval = [] subset = [] while len(hashlist) > 0: subset = hashlist[:900] qmarks = "?,"*(len(subset)-1)+"?" condition = '' if read is not None: if read: condition = ' AND read=1' else: condition = ' AND read=0' self._db_execute(self._c, u'SELECT feed_id, rowid, read FROM entries WHERE hash IN ('+qmarks+')'+condition, tuple(subset)) r = self._c.fetchall() if r is not None: retval += r hashlist = hashlist[900:] return retval def get_hashes_for_entries(self, entrylist): if len(entrylist) == 0: return [] retval = [] subset = [] while len(entrylist) > 0: subset = entrylist[:900] qmarks = "?,"*(len(subset)-1)+"?" 
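#
# get_entry_flag() above folds an entry's whole state into one integer bitmask
# so it can be cached in entry_flag_cache and tested with a bitwise AND.  A
# self-contained sketch of the technique; the F_* values here are illustrative
# assumptions (the real constants are defined elsewhere in ptvDB), commented
# out so it stays inert here:
#
#	F_MEDIA, F_PAUSED, F_DOWNLOADED, F_UNVIEWED, F_DOWNLOADING, F_ERROR = 1, 2, 4, 8, 16, 32
#
#	def compose_flag(has_media, downloaded, paused, unviewed, error):
#		importance = 0
#		if error: importance += F_ERROR
#		if has_media: importance += F_MEDIA
#		if downloaded: importance += F_DOWNLOADED
#		elif paused: importance += F_PAUSED
#		if unviewed: importance += F_UNVIEWED
#		return importance
#
#	flag = compose_flag(True, True, False, True, False)
#	assert flag & F_UNVIEWED	#test one facet without decoding the rest
#
# Because more urgent states map to higher bits, get_feed_flag() can simply
# sort a feed's entry flags numerically and let the largest value stand for
# the whole feed.
#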
self._db_execute(self._c, u'SELECT hash FROM entries WHERE rowid IN ('+qmarks+')', tuple(subset)) r = self._c.fetchall() if r is not None: retval += r entrylist = entrylist[900:] return [r[0] for r in retval] def get_unread_hashes(self): self._db_execute(self._c, u'SELECT hash FROM entries WHERE read=0') retval = self._c.fetchall() if retval is None: return [] return [r[0] for r in retval] def get_unread_entries(self, feed_id): if self.is_feed_filter(feed_id): if not self._filtered_entries.has_key(feed_id): self.get_filtered_entries(feed_id) return [r[0] for r in self.get_entrylist(feed_id) if r[3] == 0] self._db_execute(self._c, u'SELECT rowid FROM entries WHERE feed_id=? AND read=0', (feed_id,)) retval = self._c.fetchall() if retval is None: return [] return [r[0] for r in retval] def get_unread_count(self, feed_id): if self.is_feed_filter(feed_id): if not self._filtered_entries.has_key(feed_id): self.get_filtered_entries(feed_id) entries = self._filtered_entries[feed_id] list = [] for entry in entries: self._db_execute(self._c, u'SELECT read FROM entries WHERE rowid=?',(entry[0],)) try: list.append(self._c.fetchone()) except: pass unread=0 for item in list: if item[0]==0: unread=unread+1 else: feed_id = self._resolve_pointed_feed(feed_id) self._db_execute(self._c, u'SELECT count(*) FROM entries WHERE feed_id=? and read=0', (feed_id,)) unread = self._c.fetchone()[0] return unread def correct_unread_count(self, feed_id): #FIXME: we shouldn't need this one day """ Set the entry_read flag to the correct value based on all its enclosures. This is necessary because there are some bugs with regard to when this value gets set. """ if self.is_feed_filter(feed_id): return #just don't do anything #feed_id = self._resolve_pointed_feed(feed_id) entrylist = self.get_entrylist(feed_id) if entrylist: for entry in entrylist: flag = self.get_entry_flag(entry[0]) if flag & F_UNVIEWED: self.set_entry_read(entry[0],False) else: self.set_entry_read(entry[0],True) def get_entry_flags(self, feed_id): medialist=None flaglist = [] if self.is_feed_filter(feed_id): if not self._filtered_entries.has_key(feed_id): self.get_filtered_entries(feed_id) entrylist = [e[0] for e in self._filtered_entries[feed_id]] for entry in entrylist: flaglist.append(self.get_entry_flag(entry)) else: self._db_execute(self._c, u'SELECT rowid, read FROM entries WHERE feed_id=?',(feed_id,)) entrylist = self._c.fetchall() if self.get_feed_media_count(feed_id) == 0: medialist = [] media_entries = [] else: self._db_execute(self._c, u"""SELECT entry_id FROM media WHERE feed_id=?""", (feed_id,)) media_entries = self._c.fetchall() if media_entries is None: media_entries = [] else: media_entries = [r[0] for r in media_entries] for entry,read in entrylist: flaglist.append(self.get_entry_flag(entry, read=read, medialist=medialist, media_entries=media_entries)) return flaglist def get_feed_flag(self, feed_id, flaglist = None): """ Based on a feed, what flag best represents the overall status of the feed at top-level? 
This is based on the numeric value of the flag, which is why flags are enumed the way they are."""
		feed_has_media=0
		if flaglist is None:
			flaglist = self.get_entry_flags(feed_id)
		if len(flaglist)==0:
			return 0
		flaglist.sort() #lambda x,y:x[1]-y[1])
		best_flag = flaglist[-1]
		if best_flag & F_DOWNLOADED == 0 and feed_has_media==1: #feed_has_media is currently never set
			return best_flag + F_DOWNLOADED
		else:
			return best_flag

	def get_feeds_for_tag(self, tag):
		self._db_execute(self._c, u'SELECT DISTINCT feeds.rowid FROM feeds INNER JOIN tags ON tags.feed_id=feeds.rowid WHERE tag=?',(tag,))
		result = self._c.fetchall()
		if result is None:
			return []
		return [r[0] for r in result]

	def get_feeds_for_flag(self, tag):
		self._db_execute(self._c, u'SELECT DISTINCT feeds.rowid FROM feeds WHERE flags & ? == ?',(tag,tag))
		result = self._c.fetchall()
		if result is None:
			return []
		return [r[0] for r in result]

	def get_tags_for_feed(self, feed_id):
		self._db_execute(self._c, u'SELECT tag FROM tags WHERE feed_id=? ORDER BY tag',(feed_id,))
		result = self._c.fetchall()
		if not result:
			return []
		return [row[0] for row in result]

	def get_flags_for_feed(self, feed_id):
		self._db_execute(self._c, u'SELECT flags FROM feeds WHERE rowid=?',(feed_id,))
		result = self._c.fetchone()
		if result:
			return result[0]
		return 0

	def set_flags_for_feed(self, feed_id, flags):
		self._db_execute(self._c, u'UPDATE feeds SET flags=? WHERE rowid=?',(flags, feed_id))
		self._db.commit()

	def get_search_tag(self, tag):
		self._db_execute(self._c, u'SELECT query FROM tags WHERE tag=?',(tag,))
		result = self._c.fetchone()
		if result:
			return result[0]
		return []

	def get_search_tags(self):
		self._db_execute(self._c, u'SELECT tag,query FROM tags WHERE type=? ORDER BY tag',(T_SEARCH,))
		result = self._c.fetchall()
		if result:
			return result
		return []

	def add_tag_for_feed(self, feed_id, tag):
		current_tags = self.get_tags_for_feed(feed_id)
		self._db_execute(self._c, u'SELECT favorite FROM tags WHERE tag=? LIMIT 1',(tag,))
		favorite = self._c.fetchone()
		try:
			favorite = favorite[0]
		except:
			favorite = 0
		if current_tags:
			if tag not in current_tags and len(tag)>0:
				self._db_execute(self._c, u'INSERT INTO tags (tag, feed_id, type, favorite) VALUES (?,?,?,?)',(tag, feed_id, T_TAG, favorite))
				self._db.commit()
		else:
			self._db_execute(self._c, u'INSERT INTO tags (tag, feed_id, type, favorite) VALUES (?,?,?,?)',(tag, feed_id, T_TAG, favorite))
			self._db.commit()

	def fix_tags(self):
		self._db_execute(self._c, u'DELETE FROM tags WHERE tag=""')
		self._db.commit()

	def add_search_tag(self, query, tag, favorite=False):
		current_tags = [t[0] for t in self.get_all_tags(T_ALL)] #exclude favorite stuff
		if current_tags:
			if tag not in current_tags:
				self._db_execute(self._c, u'INSERT INTO tags (tag, feed_id, query, type, favorite) VALUES (?,?,?,?,?)',(tag, 0, query, T_SEARCH, favorite))
				self._db.commit()
			else:
				raise TagAlreadyExists, "The tag name "+str(tag)+" is already being used"
		else:
			self._db_execute(self._c, u'INSERT INTO tags (tag, feed_id, query, type, favorite) VALUES (?,?,?,?,?)',(tag, 0, query, T_SEARCH, favorite))
			self._db.commit()

	def change_query_for_tag(self, tag, query):
		try:
			self._db_execute(self._c, u'UPDATE tags SET query=? WHERE tag=?',(query,tag))
			self._db.commit()
		except:
			logging.error("error updating tag")

	def set_tag_favorite(self, tag, favorite=False):
		try:
			self._db_execute(self._c, u'UPDATE tags SET favorite=? WHERE tag=?',(favorite,tag))
			self._db.commit()
		except:
			logging.error("error updating tag favorite")

	def rename_tag(self, old_tag, new_tag):
		self._db_execute(self._c, u'UPDATE tags SET tag=? WHERE tag=?',(new_tag,old_tag))
		self._db.commit()

	def remove_tag_from_feed(self, feed_id, tag):
		self._db_execute(self._c, u'DELETE FROM tags WHERE tag=? AND feed_id=?',(tag,feed_id))
		self._db.commit()

	def remove_tag(self, tag):
		self._db_execute(self._c, u'DELETE FROM tags WHERE tag=?',(tag,))
		self._db.commit()

	def get_all_tags(self, type=T_TAG):
		if type==T_ALL:
			self._db_execute(self._c, u'SELECT DISTINCT tag,favorite FROM tags')
		elif type==T_TAG:
			self._db_execute(self._c, u'SELECT DISTINCT tag,favorite FROM tags WHERE type=?',(T_TAG,))
		elif type==T_SEARCH:
			self._db_execute(self._c, u'SELECT DISTINCT tag,favorite FROM tags WHERE type=?',(T_SEARCH,))
		result = self._c.fetchall()
		def alpha_sorter(x,y):
			if x[0].upper()>y[0].upper():
				return 1
			if x[0].upper()==y[0].upper():
				return 0
			return -1
		result.sort(alpha_sorter)
		#sometimes a tag has two different favorite settings due to a bug.
		#just work around it and get rid of the extras
		result = utils.uniquer(result, lambda x: x[0])
		return result

	def get_count_for_tag(self, tag):
		self._db_execute(self._c, u'SELECT count(*) FROM tags WHERE tag=?',(tag,))
		return self._c.fetchone()[0]

	def export_OPML(self, stream):
		if not utils.HAS_PYXML:
			return
		import OPML
		self._db_execute(self._c, u'SELECT title, description, url FROM feeds ORDER BY UPPER(title)')
		result = self._c.fetchall()
		if not result:
			return
		o = OPML.OPML()
		o['title']='All'
		for feed in result:
			item = OPML.Outline()
			item['title']=self._ascii(feed[0])
			item['text']=self._ascii(feed[0])
			if feed[1] is None:
				item['description'] = ""
			else:
				item['description'] = self._ascii(feed[1])
			item['xmlUrl']=feed[2]
			o.outlines.append(item)
		o.output(stream)
		stream.close()

	def import_subscriptions(self, stream, opml = True):
		"""A generator which first yields the number of feeds, and then the
		feed ids as they are inserted, and finally -1 on completion"""
		if not utils.HAS_PYXML and opml == True:
			logging.warning("Trying to import an OPML, but we don't have pyxml.
Aborting import") yield (-1,0) yield (1,0) yield (-1,0) return if opml: import OPML try: p = OPML.parse(stream) except: exc_type, exc_value, exc_traceback = sys.exc_info() error_msg = "" for s in traceback.format_exception(exc_type, exc_value, exc_traceback): error_msg += s logging.warning(error_msg) stream.close() yield (-1,0) added_feeds=[] yield (1,len(p.outlines)) for o in OPML.outline_generator(p.outlines): try: feed_id=self.insertURL(o['xmlUrl'],o['text']) if o.has_key('categories'): for tag in o['categories'].split(','): tag = tag.strip() self.add_tag_for_feed(feed_id, tag) #added_feeds.append(feed_id) yield (1,feed_id) except FeedAlreadyExists, f: yield (0,f.feed) except: exc_type, exc_value, exc_traceback = sys.exc_info() error_msg = "" for s in traceback.format_exception(exc_type, exc_value, exc_traceback): error_msg += s logging.warning(error_msg) yield (-1,0) stream.close() #return added_feeds yield (-1,0) else: #just a list in a file url_list = [] count = 0 for line in stream.readlines(): line = line.strip() if len(line) == 0: continue space_at = line.find(' ') if space_at >= 0: url = line[:space_at] title = line[space_at+1:] else: url = line title = None count+=1 url_list.append((url, title)) stream.close() yield (1,len(url_list)) for url, title in url_list: try: feed_id=self.insertURL(url, title) yield (1,feed_id) except FeedAlreadyExists, f: yield (0,f.feed) except: exc_type, exc_value, exc_traceback = sys.exc_info() error_msg = "" for s in traceback.format_exception(exc_type, exc_value, exc_traceback): error_msg += s logging.warning(error_msg) yield (-1,0) yield (-1,0) def search(self, query, filter_feed=None, blacklist=None, since=0): if not utils.HAS_SEARCH: return ([],[]) if blacklist is None: blacklist = self._blacklist if filter_feed: #no blacklist on filter feeds (doesn't make sense) result = [l for l in self.searcher.Search(query, since=since)[1] if l[3] == filter_feed] if len(result) > 0: return ([filter_feed], result) return ([],[]) return self.searcher.Search(query,blacklist, since=since) def doindex(self, callback=None): if utils.HAS_SEARCH: self.searcher.Do_Index_Threaded(callback) def reindex(self, feed_list=[], entry_list=[], threaded=True): """reindex self._reindex_feed_list and self._reindex_entry_list as well as anything specified""" if not utils.HAS_SEARCH: return self._reindex_feed_list += feed_list self._reindex_entry_list += entry_list try: if threaded: self.searcher.Re_Index_Threaded(self._reindex_feed_list, self._reindex_entry_list) else: self.searcher.Re_Index(self._reindex_feed_list, self._reindex_entry_list) except Exception, e: logging.warning("reindex failure. 
wait til next time I guess: %s" % str(e)) self._reindex_feed_list = [] self._reindex_entry_list = [] def cache_images(self): """goes through _image_cache_list and caches everything""" if self._image_cache is not None: while len(self._image_cache_list) > 0: entry_id = self._image_cache_list.pop(0) body = self.get_entry(entry_id)['description'] self._image_cache.cache_html(str(entry_id), body) while len(self._image_uncache_list) > 0: entry_id = self._image_uncache_list.pop(0) self._image_cache.remove_cache(entry_id) def _resolve_pointed_feed(self, feed_id): if not utils.HAS_SEARCH: return feed_id self._db_execute(self._c, u'SELECT feed_pointer FROM feeds WHERE rowid=?',(feed_id,)) result = self._c.fetchone() if result is None: return feed_id if result[0] >= 0: return result[0] return feed_id def is_feed_filter(self, feed_id): if not utils.HAS_SEARCH: return False self._db_execute(self._c, u'SELECT feed_pointer FROM feeds WHERE rowid=?',(feed_id,)) result = self._c.fetchone() if result is None: return False if result[0] >= 0: return True return False def get_pointer_feeds(self, feed_id): if not utils.HAS_SEARCH: return [] self._db_execute(self._c, u'SELECT rowid FROM feeds WHERE feed_pointer=?',(feed_id,)) results = self._c.fetchall() if results is None: return [] return [f[0] for f in results] def get_associated_feeds(self, feed_id): if not utils.HAS_SEARCH: return [feed_id] feed_list = [feed_id] pointer = self._resolve_pointed_feed(feed_id) if pointer != feed_id: feed_list.append(pointer) feed_list += self.get_pointer_feeds(feed_id) return feed_list def set_cache_images(self, cache): if self._image_cache is not None: if not cache: self._image_cache.finish() self._image_cache = None else: if cache: store_location = self.get_setting(STRING, '/apps/penguintv/media_storage_location', os.path.join(utils.get_home(), "media")) if store_location != "": self._image_cache = OfflineImageCache.OfflineImageCache(os.path.join(store_location, "images")) else: logging.error("could not start image cache, no storage location") #############convenience Functions####################3 def _encode_text(self,text): try: return text.encode('utf8') except: return u'' def _ascii(self, text): try: return text.encode('ascii','replace') except UnicodeDecodeError: return u'' def DEBUG_get_full_feedlist(self): self._db_execute(self._c, """SELECT rowid,title,url FROM feeds ORDER BY rowid""") result = self._c.fetchall() return result def DEBUG_reset_freqs(self): self._db_execute(self._c, 'UPDATE feeds SET pollfreq=1800') self._db.commit() def DEBUG_get_freqs(self): self._db_execute(self._c, 'SELECT title, pollfreq, lastpoll, rowid FROM feeds ORDER BY title') a = self._c.fetchall() max_len = 0 for item in a: if len(item[0]) > max_len: max_len = len(item[0]) for item in a: try: #item2=(str(item[0]),item[1]/(60),time.asctime(time.localtime(item[2]))) print self._ascii(item[0])+" "*(max_len-len(str(item[0])))+" "+str(item[1]/60)+" "+time.asctime(time.localtime(item[2]))+" "+str(item[3]) except: print "whoops: "+ self._ascii(item[0]) #print item2 print "-"*80 self._db_execute(self._c, 'SELECT title, pollfreq, lastpoll, rowid FROM feeds ORDER BY lastpoll') a = self._c.fetchall() max_len = 0 for item in a: if len(item[0]) > max_len: max_len = len(item[0]) for item in a: try: #item2=(str(item[0]),item[1]/(60),time.asctime(time.localtime(item[2]))) print self._ascii(item[0])+" "*(max_len-len(str(item[0])))+" "+str(item[1]/60)+" "+time.asctime(time.localtime(item[2]))+" "+ str(item[3]) except: print "whoops: "+ self._ascii(item[0]) 
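#
# Saved-search "feed filters" above are stored as ordinary rows in the feeds
# table whose feed_pointer column holds the rowid of the feed they filter;
# _resolve_pointed_feed() simply follows that pointer.  A minimal sketch
# against a bare sqlite3 cursor, assuming only what the queries above imply
# (a negative or NULL feed_pointer marks a plain feed); commented out so it
# stays inert here:
#
#	def resolve_pointed_feed(cursor, feed_id):
#		cursor.execute('SELECT feed_pointer FROM feeds WHERE rowid=?', (feed_id,))
#		row = cursor.fetchone()
#		if row is None or row[0] is None or row[0] < 0:
#			return feed_id	#an ordinary feed
#		return row[0]		#a filter: follow the pointer to the real feed
#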
#print item2 print "-"*80 self._db_execute(self._c, 'SELECT title, pollfreq, lastpoll, rowid FROM feeds ORDER BY pollfreq') a = self._c.fetchall() a.reverse() max_len = 0 for item in a: if len(item[0]) > max_len: max_len = len(item[0]) for item in a: try: #item2=(str(item[0]),item[1]/(60),time.asctime(time.localtime(item[2]))) print self._ascii(item[0])+" "*(max_len-len(self._ascii(item[0])))+" "+str(item[1]/60)+" "+time.asctime(time.localtime(item[2]))+" "+ str(item[3]) except: print "whoops: "+ self._ascii(item[0]) #print item2 def DEBUG_delete_all_media(self): self._db_execute(self._c, u'UPDATE media SET download_status=?',(D_NOT_DOWNLOADED,)) self._db.commit() def DEBUG_correct_feed(self, feed_id): self._db_execute(self._c, u'SELECT media.download_status, media.viewed, media.entry_id, media.rowid FROM media,entries WHERE media.entry_id=entries.rowid AND media.download_status!=? AND entries.feed_id=?',(D_NOT_DOWNLOADED,feed_id)) media = self._c.fetchall() for item in media: self.set_entry_read(item[2],item[1]) class NoFeed(Exception): def __init__(self,feed): self.feed = feed def __str__(self): return self.feed class FeedPollError(Exception): def __init__(self,feed,msg="unspecified error"): self.feed = feed self.msg = msg def __str__(self): return str(self.feed)+": "+self.msg class NoEntry(Exception): def __init__(self,entry): self.entry = entry def __str__(self): return self.entry class NoSetting(Exception): def __init__(self,setting): self.setting = setting def __str__(self): return self.setting class DBError(Exception): def __init__(self,error): self.error = error def __str__(self): return self.error class FeedAlreadyExists(Exception): def __init__(self,feed): self.feed = feed def __str__(self): return self.feed class TagAlreadyExists(Exception): def __init__(self,tag): self.tag = tag def __str__(self): return self.tag class BadSearchResults(Exception): def __init__(self,m): self.m = m def __str__(self): return self.m PenguinTV-4.2.0/penguintv/SqliteSyncClient.py0000644000000000000000000002427411302013317016110 0ustar import time import logging import traceback import tempfile import sqlite3 import os class SqliteSyncClient: def __init__(self): self._username = None self._password = None self._sync_file = None self._authenticated = False self._local_timestamp = 0 self._no_updates = False self._readonly = False self._bad_db = False def set_username(self, username): if username == self._username: return self.finish() self._username = username def set_password(self, password): if password == self._password: return self.finish() self._password = password def set_readonly(self, readonly): self._readonly = readonly def finish(self, last_upload=[]): if self._sync_file is not None: db = self._get_db() if db is not None: self.submit_readstates(last_upload, do_upload=False, noclosedb=db) c = db.cursor() one_month = int(time.time()) - (60*60*24*30) c.execute('DELETE FROM readinfo WHERE timestamp < ?', (one_month,)) db.commit() #c.execute('VACUUM') #db.commit() c.close() if len(last_upload) > 0: self._close_and_send_db(db) else: db.close() os.remove(self._sync_file) self._sync_file = None self._authenticated = False return True def authenticate(self): if len(self._username) == 0: return False if self._authenticated: self._authenticated = False try: success = self._do_authenticate() except Exception, e: logging.error("error authenticating: %s" % str(e)) success = False self.__logging_in = False self._authenticated = success return success def submit_readstates(self, readstates, do_upload=True, 
noclosedb=None): """Returns True on success, False on error""" if self._readonly: logging.debug("in readonly mode, not submitting") return True logging.debug("ArticleSync Submitting %i readstates" % len(readstates)) if len(readstates) == 0: #logging.debug("(returning immediately)") return True if do_upload and noclosedb is not None: logging.error("Can't upload without closing DB, so this makes no sense") if noclosedb is None: db = self._get_db() if db is None: self._sync_file = None db = self._create_db() else: db = noclosedb try: c = db.cursor() c.execute(u'SELECT * FROM readinfo LIMIT 1') except Exception, e: logging.error("Bad Articlesync DB, recreating: %s" % str(e)) self._sync_file = None db = self._create_db() c = db.cursor() timestamp = int(time.time()) hashes = [r[0] for r in readstates] existing = [] while len(hashes) > 0: subset = hashes[:900] qmarks = '?,'*(len(subset)-1)+'?' c.execute(u'SELECT hash FROM readinfo WHERE hash IN ('+qmarks+')', \ tuple(subset)) batch = c.fetchall() if batch is None: batch = [] existing = existing + batch hashes = hashes[900:] existing = [r[0] for r in existing] for entry_hash, readstate in readstates: #logging.debug(": %s %i %i" % (entry_hash, timestamp, readstate)) if entry_hash in existing: c.execute(u'UPDATE readinfo SET readstate=?, timestamp=? WHERE hash=?', (readstate, timestamp, entry_hash)) else: c.execute(u'INSERT INTO readinfo (hash, timestamp, readstate) VALUES (?,?,?)', (entry_hash, timestamp, readstate)) db.commit() c.close() if do_upload: return self._close_and_send_db(db) if noclosedb is None: db.close() return True def get_readstates(self, hashlist): """takes a list of hashes, asks the db for their readstates returns a hash of entryhash:readstate""" try: db = self._get_db() c = db.cursor() except InternetFail, e: self._bad_db = False return [] except Exception, e: self._bad_db = True return None readstates = [] while len(hashlist) > 0: subset = hashlist[:900] qmarks = '?,'*(len(subset)-1)+'?' try: c.execute(u'SELECT hash, readstate FROM readinfo WHERE hash IN ('+qmarks+')', \ tuple(subset)) except Exception, e: self._bad_db = True return None batch = c.fetchall() if batch is None: batch = [] readstates += batch hashlist = hashlist[900:] c.close() db.close() return readstates def get_readstates_since(self, timestamp): """takes a timestamp, asks the db for hashes since then returns a hash of entryhash:readstate""" try: server_timestamp = self._get_server_timestamp() except Exception, e: logging.error("error getting timestamp: %s" % str(e)) return [] if server_timestamp == -1: logging.error("error getting timestamp: (-1)") return [] if self._no_updates: #logging.debug("server time %i, our time %i" % (server_timestamp, self._local_timestamp)) if server_timestamp == self._local_timestamp: #logging.debug("no updates last time, so no point checking") return [] if server_timestamp < self._local_timestamp: logging.debug("server timestamp is less than local, so clocks must be off. 
Using their time") timestamp = server_timestamp try: db = self._get_db(server_timestamp) c = db.cursor() except InternetFail, e: self._bad_db = False return [] except Exception, e: self._bad_db = True return None try: c.execute(u'SELECT hash, readstate FROM readinfo WHERE timestamp >= ?', (timestamp,)) except Exception, e: self._bad_db = True return None new_hashes = c.fetchall() #logging.debug("result: %s" % str(new_hashes)) c.execute(u'SELECT hash, readstate, timestamp FROM readinfo') r = c.fetchall() #for row in r: # logging.debug("whole: %s" % str(row)) c.close() db.close() if new_hashes is None: new_hashes = [] if len(new_hashes) == 0: #logging.debug("No results, so if the server doesn't update next time we won't download it") self._no_updates = True return [] return new_hashes def _get_db(self, server_timestamp=None): if self._bad_db: self._bad_db = False return self._create_db() if self._sync_file is None: return self._download_db() try: #try anyway to catch internet problems s = self._get_server_timestamp() if server_timestamp is None: server_timestamp = s except Exception, e: logging.error("error getting timestamp: %s" % str(e)) raise InternetFail("error getting timestamp: %s" % str(e)) if server_timestamp == -1: raise InternetFail("error getting timestamp: (-1)") if server_timestamp != self._local_timestamp: #logging.debug("sync time unexpectedly changed %i %i" \ # % (server_timestamp, self._local_timestamp)) return self._download_db() try: return sqlite3.connect(self._sync_file) except Exception, e: logging.error("error loading articlesync db: %s %s" % (type(e), str(e))) self._bad_db = True return None def _download_db(self): self._no_updates = False try: if not self._db_exists(): logging.error("no db found") self._bad_db = True return None except Exception, e: logging.warning("No internet connection, cancelling: %s" % str(e)) raise InternetFail("No internet connection, cancelling: %s" % str(e)) try: db_data = self._do_download_db() except Exception, e: logging.error("error downloading db: %s" % str(e)) self._bad_db = True return None if self._sync_file is None: self._sync_file = tempfile.mkstemp(suffix='.db')[1] fp = open(self._sync_file, 'wb') fp.write(db_data) #logging.debug("Downloaded %i bytes" % fp.tell()) fp.close() try: self._local_timestamp = self._get_server_timestamp() except Exception, e: logging.error("error getting timestamp: %s" % str(e)) if self._local_timestamp == -1: logging.error("error getting timestamp, using anyway?: %s" % str(e)) try: return sqlite3.connect(self._sync_file) except Exception, e: #problem with the db, have to start over logging.error("error loading articlesync db (2): %s %s" % (type(e), str(e))) self._bad_db = True return None def _create_db(self): logging.debug("creating new db") self._sync_file = tempfile.mkstemp(suffix='.db')[1] db = sqlite3.connect(self._sync_file) c = db.cursor() c.execute(u"""CREATE TABLE readinfo ( id INTEGER PRIMARY KEY, hash TEXT NOT NULL, timestamp INTEGER NOT NULL, readstate BOOL NOT NULL );""") db.commit() c.close() self._local_timestamp = int(time.time()) #logging.debug("SETTING server TIMESTAMP2: %i" % self._local_timestamp) try: if not self._set_server_timestamp(self._local_timestamp): logging.error("error setting timestamp") except Exception, e: logging.error("error setting timestamp: %s" % str(e)) return db def _close_and_send_db(self, db): """close the db and send it""" db.close() fp = open(self._sync_file, 'rb') try: success = self._upload_db(fp) except Exception, e: logging.error("error uploading db: %s" 
% str(e)) success = False if not success: logging.debug("error uploading readstate database") return False #logging.debug("Uploaded %i bytes" % fp.tell()) fp.close() self._local_timestamp = int(time.time()) #logging.debug("SETTING server TIMESTAMP3: %i" % self._local_timestamp) try: if not self._set_server_timestamp(self._local_timestamp): logging.error("error setting timestamp") return False except Exception, e: logging.error("error setting timestamp: %s" % str(e)) return False return True def _reset_db(self): db = self._create_db() self._close_and_send_db(db) ##### extended class functions##### def _do_authenticate(self): """Authenticates to the server with self._username and self._password. Returns True on success and False on failure""" logging.error("must be implemented in subclass") assert False def _set_server_timestamp(self, timestamp): logging.error("must be implemented in subclass") assert False def _get_server_timestamp(self): logging.error("must be implemented in subclass") assert False def _db_exists(self): logging.error("must be implemented in subclass") assert False def _do_download_db(self): logging.error("must be implemented in subclass") assert False def _upload_db(self, fp): logging.error("must be implemented in subclass") assert False class InternetFail(Exception): def __init__(self,m): self.m = m def __str__(self): return self.m PenguinTV-4.2.0/penguintv/UpdateTasksManager.py0000644000000000000000000001051211302524163016372 0ustar # Written by Owen Williams # see LICENSE for license information import time import traceback, sys import logging import gobject import threading #the manager can either run tasks as a gobject idler, as a thread, #or it can let the application decide when to run the generator GOBJECT=0 THREADED=1 MANUAL=2 FLUSH_TIME = 60*10 class UpdateTasksManager: task_list = [] id_time = 0 time_appendix = 0 def __init__(self, style=GOBJECT, name=""): self.style = style self.threadSleepTime = 0.5 self.updater_running = False self.my_tasks = [] self.name = name self.exception = None def get_task_id(self): cur_time = int(time.time()) if UpdateTasksManager.id_time == cur_time: UpdateTasksManager.time_appendix = UpdateTasksManager.time_appendix+1.0 else: UpdateTasksManager.id_time = cur_time UpdateTasksManager.time_appendix=0.0 return float(UpdateTasksManager.id_time)+(UpdateTasksManager.time_appendix/100) def queue(self, func, arg=None, waitfor=None, clear_completed=True, priority=0, cb=None): task_id = self.get_task_id() if priority==1: self.my_tasks.reverse() self.my_tasks.append((func, arg, cb, task_id, waitfor, clear_completed)) if priority==1: self.my_tasks.reverse() if self.updater_running == False: self.updater_running = True if self.style == GOBJECT: gobject.timeout_add(100, self.updater_gen().next) elif self.style == THREADED: threading.Thread(self.updater_thread) #elif manual, do nothing return task_id def peek(self, index=0): if len(self.my_tasks)>index: return self.my_tasks[index] else: return None def pop(self, index=0): return self.my_tasks.pop(index) def task_count(self): return len(self.my_tasks) def is_completed(self, taskid): if taskid in UpdateTasksManager.task_list: return True return False def clear_completed(self, taskid): UpdateTasksManager.task_list.remove(taskid) def set_completed(self, taskid): UpdateTasksManager.task_list.append(taskid) def updater_thread(self): for item in self.updater_gen(): time.sleep(self.threadSleepTime) def updater_timer(self): self.updater_running = True for item in self.updater_gen(True): pass if 
self.task_count() > 0: # we didn't finish return True self.updater_running = False return False def updater_gen(self,timed=False): """Generator that empties that queue and yields on each iteration""" skipped=0 waiting_on = [] while self.task_count() > 0: #just run forever self.exception = None var = self.peek(skipped) if var is None: #ran out of tasks skipped=0 waiting_on = [] yield True continue func, args, cb, task_id, waitfor, clear_completed = var if waitfor: #don't pop if false, and if previous tasks think that task isn't #don't yet then also don't do it (to preserve order) if self.is_completed(waitfor) and waitfor not in waiting_on: try: if type(args) is tuple: cb is not None and cb(func(*args)) or func(*args) elif args is not None: cb is not None and cb(func(args)) or func(args) else: cb is not None and cb(func()) or func() except Exception, e: self.exception = e exc_type, exc_value, exc_traceback = sys.exc_info() error_msg = "" for s in traceback.format_exception(exc_type, exc_value, exc_traceback): error_msg += s self.set_completed(task_id) if clear_completed: self.clear_completed(waitfor) self.pop(skipped) else: waiting_on.append(waitfor) if time.time() - task_id > FLUSH_TIME: self.pop(skipped) skipped = skipped+1 else: try: if type(args) is tuple: cb is not None and cb(func(*args)) or func(*args) elif args is not None: cb is not None and cb(func(args)) or func(args) else: cb is not None and cb(func()) or func() except Exception, e: self.exception = e exc_type, exc_value, exc_traceback = sys.exc_info() error_msg = "" for s in traceback.format_exception(exc_type, exc_value, exc_traceback): error_msg += s print error_msg self.set_completed(task_id) self.pop(skipped) yield True if not timed: self.updater_running = False yield False class BadArgument(Exception): def __init__(self,arg): self.arg = arg def __str__(self): return "Bad Argument: "+self.arg PenguinTV-4.2.0/penguintv/ArticleSync.py0000644000000000000000000003356711175673074015124 0ustar # Synchronize entry read states with a server import urllib import urlparse import threading import logging #import traceback import time import gettext logging.basicConfig(level=logging.DEBUG) _=gettext.gettext import gobject import gtk from ptvDB import FF_MARKASREAD, STRING, INT import utils import amazon import FtpSyncClient ### Debugging uses regular callbacks instead of gobject idlers DEBUG = False PLUGINS = { _("Amazon S3"): ("S3SyncClient", "S3SyncClient"), _("FTP"): ("FtpSyncClient", "FtpSyncClient") } def threaded_func(): def annotate(func): def _exec_cb(self, *args, **kwargs): if not kwargs.has_key('cb'): return func(self, *args, **kwargs) elif kwargs['cb'] is None: del kwargs['cb'] return func(self, *args, **kwargs) def t_func(self, *args, **kwargs): self._operation_lock.acquire() cb = kwargs['cb'] del kwargs['cb'] try: retval = func(self, *args, **kwargs) except Exception, e: retval = None logging.error("Article Sync caught error: %s %s" % (type(e), str(e))) self._operation_lock.release() if type(retval) is tuple: if DEBUG: cb(*retval) else: gobject.idle_add(cb, *retval) else: if DEBUG: cb(retval) else: gobject.idle_add(cb, retval) t = threading.Thread(None, t_func, "ArticleSync", args=(self,) + args, kwargs=kwargs) t.setDaemon(True) t.start() return _exec_cb return annotate def authenticated_func(defaultret=None): def annotate(func): def _exec_cb(self, *args, **kwargs): if not self._enabled: return defaultret elif self._conn is None: self.emit('server-error', "No Connection") return defaultret elif not self._authenticated: 
self.emit('authentication-error', "Not authenticated") return defaultret else: return func(self, *args, **kwargs) return _exec_cb return annotate class ArticleSync(gobject.GObject): __gsignals__ = { 'update-feed-count': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_INT, gobject.TYPE_INT])), 'got-readstates': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_PYOBJECT])), 'sent-readstates': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([])), 'authentication-error': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_PYOBJECT])), 'server-error': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_PYOBJECT])) } def __init__(self, app, entry_view, plugin, enabled=True, readonly=False): gobject.GObject.__init__(self) global BUCKET_NAME, BUCKET_NAME_SUF if app is not None: app.connect('entry-updated', self._entry_updated_cb) app.connect('entries-viewed', self._entries_viewed_cb) app.connect('new-database', self.__new_database_cb) self._db = app.db else: import ptvDB self._db = ptvDB.ptvDB() self._handlers = [] if entry_view is not None: self.set_entry_view(entry_view) self._conn = None self._authenticated = False self._enabled = enabled self._readonly = readonly #diff is a dict of feed_id:readstates #and readstates is a dict of entry_id:readstate self._readstates_diff = {} self._operation_lock = threading.Lock() self._current_plugin = None self.load_plugin(plugin) def update_cb(success): return False gobject.timeout_add(20 * 60 * 1000, self.get_and_send, update_cb) def __new_database_cb(self, app, db): self._db = db def set_entry_view(self, entry_view): for disconnector, h_id in self._handlers: disconnector(h_id) h_id = entry_view.connect('entries-viewed', self._entries_viewed_cb) self._handlers.append((entry_view.disconnect, h_id)) def set_enabled(self, enabled): if self._conn is None: return if self._enabled and self._authenticated and not enabled: #changing to offline self.finish() self._enabled = enabled if not self._enabled: self._authenticated = False def set_readonly(self, readonly): self._readonly = readonly if self._conn is not None: self._conn.set_readonly(readonly) def get_current_plugin(self): return self._current_plugin def get_plugins(self): #will eventually be generated automatically return PLUGINS def get_parameter_ui(self, plugin): if self._conn is None: #logging.debug("no conn, returning") return None if plugin != self._current_plugin: #logging.debug("wrong plugin, returning: %s %s" % (plugin, self._current_plugin)) return None for title in PLUGINS.keys(): if title == plugin: return self._build_ui(plugin, self._conn.get_parameters()) #logging.debug("didn't find plugin, returning") return None def _build_ui(self, plugin, parameters): table = gtk.Table(2, len(parameters), False) y = 0 for label_text, param, default, hidechars in parameters: label = gtk.Label(label_text) label.set_alignment(0, 0.5) table.attach(label, 0, 1, y, y + 1) entry = gtk.Entry() entry.set_visibility(not hidechars) table.attach(entry, 1, 2, y, y + 1) self._setup_entry(plugin.replace('_',''), entry, param, default) y += 1 return table def _setup_entry(self, plugin, widget, param, default): if utils.HAS_GCONF: try: import gconf except: from gnome import gconf conf = gconf.client_get_default() conf.add_dir('/apps/penguintv',gconf.CLIENT_PRELOAD_NONE) conf.notify_add('/apps/penguintv/sync_plugins/%s/%s' % \ (plugin.replace(' ', '_'), param), self._gconf_param_changed, (widget, plugin, param)) value = self._db.get_setting(STRING, '/apps/penguintv/sync_plugins/%s/%s' % 
\ (plugin.replace(' ', '_'), param), default) widget.set_text(value) widget.connect('changed', self._parameter_changed, plugin, param) def _gconf_param_changed(self, c, connid, entr, (widget, plugin, param)): self._parameter_changed(widget, plugin, param, noset=True) def _parameter_changed(self, widget, plugin, param, noset=False): if not noset: self._db.set_setting(STRING, '/apps/penguintv/sync_plugins/%s/%s' % \ (plugin.replace(' ', '_'), param), widget.get_text()) getattr(self._conn, 'set_%s' % param)(widget.get_text()) def load_plugin(self, plugin=None): if plugin is None: if self._current_plugin is None: return plugin = self._current_plugin self._authenticated = False def _do_load_plugin(): if self.is_working() > 1: return True self._operation_lock.acquire() self._current_plugin = plugin for title in PLUGINS.keys(): if title == plugin: self._conn = getattr(__import__(PLUGINS[title][0]), PLUGINS[title][1])() self._conn.set_readonly(self._readonly) self._load_plugin_settings(plugin) self._operation_lock.release() return False self._conn = None self._operation_lock.release() return False if self._current_plugin is not None: self.finish() gobject.timeout_add(500, _do_load_plugin) else: _do_load_plugin() def _load_plugin_settings(self, plugin): assert self._conn is not None for label, param, default, hidechars in self._conn.get_parameters(): val = self._db.get_setting(STRING, '/apps/penguintv/sync_plugins/%s/%s' % \ (plugin.replace(' ', '_'), param), default) #logging.debug("initializing plugin %s with %s" % (param, val)) getattr(self._conn, 'set_%s' % param)(val) def is_authenticated(self): return self._authenticated def is_enabled(self): return self._enabled def is_working(self): my_threads = [t.getName() for t in threading.enumerate() \ if t.getName().startswith("ArticleSync")] return len(my_threads) def is_loaded(self): return self._conn is not None def finish(self): def empty_cb(arg=None): pass if self._enabled and self._conn is not None: last_diff = self._get_readstates_list(self._readstates_diff) self._readstates_diff = {} conn = self._conn self._conn = None self._do_close_conn(conn, last_diff, cb=empty_cb) @threaded_func() def _do_close_conn(self, conn, states): while self.is_working() > 1: time.sleep(.5) conn.finish(states) @threaded_func() def authenticate(self): """Creates the bucket as part of authentication, helpfully""" if self._conn is None: return False if self._authenticated: while self.is_working() > 1: time.sleep(.5) self._conn.finish() result = self._conn.authenticate() self._authenticated = result return result def disconnected(self): """lost the connection -- no way to shut down""" self._authenticated = False def _entries_viewed_cb(self, app, viewlist): if not self._authenticated: return for feed_id, viewlist in viewlist: for entry_id in viewlist: if not self._readstates_diff.has_key(feed_id): self._readstates_diff[feed_id] = {} self._readstates_diff[feed_id][entry_id] = 1 #logging.debug("sync updated diff: %s" % str(self._readstates_diff)) def _entry_updated_cb(self, app, entry_id, feed_id): if not self._authenticated: return self.diff_entry(entry_id, feed_id) @authenticated_func() def diff_entry(self, entry_id, feed_id): readstate = self._db.get_entry_read(entry_id) if not self._readstates_diff.has_key(feed_id): self._readstates_diff[feed_id] = {} self._readstates_diff[feed_id][entry_id] = readstate #logging.debug("sync updated diff2: %s" % str(self._readstates_diff)) @authenticated_func(True) def get_and_send(self, cb): timestamp = self._db.get_setting(INT, 
'article_sync_timestamp', int(time.time()) - (60 * 60 * 24)) self.get_readstates_since(timestamp) if self._readonly: logging.info("Readonly mode, not submitting") else: self.submit_readstates() return True @authenticated_func() def submit_readstates_since(self, timestamp, cb): if self._readonly: logging.info("Readonly mode, not submitting") return readstates = self._db.get_entries_since(timestamp) readstates = [(r[2],r[3]) for r in readstates if r[3] == 1] logging.debug("submitting readstates since %i, there are %i" \ % (timestamp, len(readstates))) self._do_submit_readstates(readstates, cb=cb) @authenticated_func() def submit_readstates(self): if self._readonly: logging.info("Readonly mode, not submitting") return def submit_cb(success): return False readstates = self._get_readstates_list(self._readstates_diff) self._readstates_diff = {} logging.debug("updating %i readstates" % len(readstates)) self._do_submit_readstates(readstates, cb=submit_cb) return True def _get_readstates_list(self, state_dict): read_entries = [] unread_entries = [] for feed_id in state_dict.keys(): for entry_id in state_dict[feed_id].keys(): if state_dict[feed_id][entry_id]: read_entries.append(entry_id) else: unread_entries.append(entry_id) read_hashes = self._db.get_hashes_for_entries(read_entries) readstates = [(r, 1) for r in read_hashes] return readstates @threaded_func() def _do_submit_readstates(self, readstates): #logging.debug("submitting readstates") retval = self._conn.submit_readstates(readstates) if not DEBUG: gtk.gdk.threads_enter() if retval: self.emit('sent-readstates') else: self.emit('server-error', 'Problem submitting readstates') if not DEBUG: gtk.gdk.threads_leave() #logging.debug("finished submitting readstates") return retval @authenticated_func() def get_readstates_since(self, timestamp): logging.debug("getting readstates since %i" % timestamp) self._do_get_readstates_since(timestamp, cb=self.get_readstates_cb) @threaded_func() def _do_get_readstates_since(self, timestamp): return self._conn.get_readstates_since(timestamp) @authenticated_func() def get_readstates(self, hashlist): if len(hashlist) == 0: return logging.debug("getting readstates for %i entries" % len(hashlist)) self._do_get_readstates(hashlist, cb=self.get_readstates_cb) @authenticated_func() def get_readstates_for_entries(self, entrylist): """take an entrylist, build a list of hashes, ask for their readstates""" if len(entrylist) == 0: return logging.debug("getting %i readstates" % len(entrylist)) hashlist = self._db.get_hashes_for_entries(entrylist) self._do_get_readstates(hashlist, cb=self.get_readstates_cb) @threaded_func() def _do_get_readstates(self, hashlist): return self._conn.get_readstates(hashlist) def get_readstates_cb(self, readstates): def submit_cb(success): return False if readstates is None: logging.debug("Got none, must be new db, submitting last 2 weeks") then = time.time() - (60*60*24*14) self.submit_readstates_since(then, submit_cb) self.emit('got-readstates', []) return False if len(readstates) == 0: logging.debug("No readstates to report") self.emit('got-readstates', []) return False unread_hashes = [] read_hashes = [] for entry_hash, readstate in readstates: if readstate: read_hashes.append(entry_hash) else: unread_hashes.append(entry_hash) unread_entries = \ self._db.get_entries_for_hashes(read_hashes) unread_entries.sort() #logging.debug("hash to entry conversion result: %i known %i unknown" \ # % (len(unread_entries), len(readstates) - len(unread_entries))) viewlist = [] cur_feed_id = None cur_list = 
[] for feed_id, entry_id, readstate in unread_entries: if feed_id != cur_feed_id: if len(cur_list) > 0: viewlist.append((cur_feed_id, cur_list)) cur_list = [] cur_feed_id = feed_id if readstate == 0: cur_list.append(entry_id) #else: # logging.debug("programming error: should never be true") if len(cur_list) > 0: viewlist.append((cur_feed_id, cur_list)) logging.debug("marking %i as viewed" % len(viewlist)) self.emit('got-readstates', viewlist) return False PenguinTV-4.2.0/penguintv/ThreadPool.py0000644000000000000000000001675511024322323014722 0ustar # Written by Owen Williams # see LICENSE for license information import threading import logging import time try: import PyLucene HAS_LUCENE = True except: HAS_LUCENE = False # Ensure booleans exist (not needed for Python 2.2.1 or higher) try: True except NameError: False = 0 True = not False class ThreadPool: """Flexible thread pool class. Creates a pool of threads, then accepts tasks that will be dispatched to the next available thread.""" def __init__(self, numThreads,name="ThreadPoolThread", lucene_compat=False): """Initialize the thread pool with numThreads workers.""" self.__threads = [] self.__resizeLock = threading.Condition(threading.Lock()) self.__taskLock = threading.Condition(threading.Lock()) self.__tasks = [] self.__isJoining = False self.__name = name self.__maxThreads = numThreads self.occupied_threads = 0 self.lucene_compat = lucene_compat #self.setThreadCount(numThreads) # def setThreadCount(self, newNumThreads): # # """ External method to set the current pool size. Acquires # the resizing lock, then calls the internal version to do real # work.""" # # # Can't change the thread count if we're shutting down the pool! # if self.__isJoining: # return False # # self.__resizeLock.acquire() # try: # self.__setThreadCountNolock(newNumThreads) # finally: # self.__resizeLock.release() # return True # # def __setThreadCountNolock(self, newNumThreads): # # """Set the current pool size, spawning or terminating threads # if necessary. Internal use only; assumes the resizing lock is # held.""" # # # If we need to grow the pool, do so # while newNumThreads > len(self.__threads): # if self.lucene_compat: # newThread = LuceneThreadPoolThread(self,self.__name) # else: # newThread = ThreadPoolThread(self,self.__name) # self.__threads.append(newThread) # newThread.start() # # If we need to shrink the pool, do so # while newNumThreads < len(self.__threads): # self.__threads[0].goAway() # del self.__threads[0] def getThreadCount(self): """Return the number of threads in the pool.""" self.__resizeLock.acquire() try: return len(self.__threads) finally: self.__resizeLock.release() def getTaskCount(self): """Return the number of queued items""" return len(self.__tasks)+self.occupied_threads def queueTask(self, task, args=None, taskCallback=None): """Insert a task into the queue. task must be callable; args and taskCallback can be None.""" if self.__isJoining == True: return False if not callable(task): return False self.__resizeLock.acquire() i = 0 deadlist = [] for t in self.__threads: if not t.isAlive(): deadlist.append(i) i+=1 deadlist.reverse() for i in deadlist: #logging.debug("deleting dead thread %i" % i) del self.__threads[i] self.__resizeLock.release() self.__taskLock.acquire() try: self.__tasks.append((task, args, taskCallback)) #logging.debug("max: %i, running: %i, used: %i, tasks: %i start new?" 
% (self.__maxThreads, len(self.__threads), self.occupied_threads, len(self.__tasks))) if self.__maxThreads > len(self.__threads): #logging.debug("yes start new") if self.lucene_compat: newThread = LuceneThreadPoolThread(self,self.__name) else: newThread = ThreadPoolThread(self,self.__name) self.__threads.append(newThread) newThread.start() #else: # logging.debug("no, thread limit or some unoc") return True finally: self.__taskLock.release() def getNextTask(self): """ Retrieve the next task from the task queue. For use only by ThreadPoolThread objects contained in the pool.""" self.__taskLock.acquire() try: if self.__tasks == []: return (None, None, None) else: task, args, taskCallback = self.__tasks.pop(0) if not callable(task): logging.warning("task no longer callable, skipping") return (None, None, None) return (task, args, taskCallback) finally: self.__taskLock.release() def joinAll(self, waitForTasks = True, waitForThreads = True): """ Clear the task queue and terminate all pooled threads, optionally allowing the tasks and threads to finish.""" # Mark the pool as joining to prevent any more task queueing self.__isJoining = True # Wait for tasks to finish if waitForTasks: while self.__tasks != []: time.sleep(.1) else: self.__tasks = [] # Tell all the threads to quit self.__resizeLock.acquire() try: self.__isJoining = True # Wait until all threads have exited if waitForThreads: while len(self.__threads)>0: self.__threads[0].goAway() self.__threads[0].join(6) del self.__threads[0] else: while len(self.__threads)>0: del self.__threads[0] # Reset the pool for potential reuse self.__isJoining = False finally: self.__resizeLock.release() class ThreadPoolThread(threading.Thread): """ Pooled thread class. """ threadSleepTime = 1.0 def __init__(self, pool, n="ThreadPoolThread"): """ Initialize the thread and remember the pool. """ threading.Thread.__init__(self,name=n) self.__pool = pool self.__isDying = False def run(self): """ Until told to quit, retrieve the next task and execute it, calling the callback if any. """ start_time = time.time() while self.__isDying == False: cmd, args, callback = self.__pool.getNextTask() # If there's nothing to do, die if cmd is None: #logging.debug("nothing to do") if time.time() - start_time > 10: #logging.debug("dying") break time.sleep(ThreadPoolThread.threadSleepTime) continue if callback is None: self.__pool.occupied_threads+=1 #logging.debug("max: %i, running: %i, used: %i, tasks: %i" % (self.__pool._ThreadPool__maxThreads, len(self.__pool._ThreadPool__threads), self.__pool.occupied_threads, len(self.__pool._ThreadPool__tasks))) cmd(args) self.__pool.occupied_threads-=1 else: self.__pool.occupied_threads+=1 #logging.debug("max: %i, running: %i, used: %i, tasks: %i" % (self.__pool._ThreadPool__maxThreads, len(self.__pool._ThreadPool__threads), self.__pool.occupied_threads, len(self.__pool._ThreadPool__tasks))) callback(cmd(args)) self.__pool.occupied_threads-=1 start_time = time.time() def goAway(self): """ Exit the run loop next time through.""" self.__isDying = True #if HAS_LUCENE: # l_threadclass = PyLucene.PythonThread #else: # l_threadclass = threading.Thread #this class will never get called if we don't have lucene, but we need to declare it #even if we don't have the library (no preprocessors in python) #class LuceneThreadPoolThread(l_threadclass): # # """ Pooled thread class. """ # # threadSleepTime = 0.5 # # def __init__(self, pool, n="LuceneThreadPoolThread"): # # """ Initialize the thread and remember the pool. 
""" # # l_threadclass.__init__(self,name=n) # self.__pool = pool # self.__isDying = False # # def run(self): # # """ Until told to quit, retrieve the next task and execute # it, calling the callback if any. """ # # while self.__isDying == False: # cmd, args, callback = self.__pool.getNextTask() # # If there's nothing to do, just sleep a bit # if cmd is None: # time.sleep(LuceneThreadPoolThread.threadSleepTime) # elif callback is None: # self.__pool.occupied_threads+=1 # cmd(args) # self.__pool.occupied_threads-=1 # else: # self.__pool.occupied_threads+=1 # callback(cmd(args)) # self.__pool.occupied_threads-=1 # # def goAway(self): # # """ Exit the run loop next time through.""" # # self.__isDying = True PenguinTV-4.2.0/penguintv/itunes.py0000644000000000000000000000751611311477710014175 0ustar # itunes.py # Written by Owen Williams, (c) 2007 # see LICENSE for license information # # iTunes has very strange weblinks, but they are not that hard to read. # A "viewPodcast" link returns a gzipped web page that contains a link that # iTunes can load. Although the protocol of this link is itms://, we can # load it with http. This time we get a gzipped xml file, and toward the # bottom of the file is a simple key / value pair for episodeURL. This # url is what the podcast author has told itunes to use, and it'll be regular # RSS (we hope). import sys import gzip import urllib import HTMLParser import logging from xml.sax import saxutils, make_parser from xml.sax.handler import feature_namespaces def is_itms_url(url): if url.lower().startswith("itms://"): return True def is_itunes_url(url): """ Two simple checks to see if this is a valid itunes url: (ie, http://phobos.apple.com/WebObjects/MZStore.woa/wa/viewPodcast?id=207870198) * does it contain "phobos.apple.com", and * does it contain "viewPodcast" There's also another form, as in http://www.itunes.com/podcast?id=207870198""" if url.lower().startswith("itms://"): return True if "apple.com/" in url.lower() and "viewPodcast" in url: return True if "itunes.com/podcast" in url.lower(): return True return False def get_rss_from_itunes(url): if not is_itunes_url(url): raise ItunesError, "not an itunes url" if not is_itms_url(url): url2 = get_itms_url(url) return get_podcast_url(url2) else: url2 = url.replace("itms://", "http://") return get_podcast_url(url2) def get_itms_url(url): # Part 1, get the itunes "webpage" for this feed # we have to save the file because urlopen doesn't support seeking filename, message = urllib.urlretrieve(url) #uncompressed = gzip.GzipFile(filename=filename, mode='r') uncompressed = open(filename, 'r') parser = viewPodcastParser() parser.feed(uncompressed.read()) if parser.url is None: raise ItunesError, "error getting viewpodcast url from itunes" return parser.url def get_podcast_url(url): # Part 2, find the actual rss link in the itunes "webpage" filename, message = urllib.urlretrieve(url) #uncompressed = gzip.GzipFile(filename=filename, mode='r') uncompressed = open(filename, 'r') parser = make_parser() parser.setFeature(feature_namespaces, 0) handler = itunesHandler() parser.setContentHandler(handler) parser.parse(uncompressed) if handler.url is None: raise ItunesError, "error finding podcast url" return handler.url class viewPodcastParser(HTMLParser.HTMLParser): def __init__(self): HTMLParser.HTMLParser.__init__(self) self.url = None def handle_starttag(self, tag, attrs): new_attrs = [] if tag.upper() == "BODY": for attr, val in attrs: if attr == "onload": url = val[val.find("itms://") + 4:] url = url[:url.find("'")] 
url = "http" + url self.url = url try: from xml.sax.handler import ContentHandler def_handler = ContentHandler except: try: from xml.sax.saxutils import DefaultHandler def_handler = DefaultHandler except Exception, e: logging.error("couldn't get xml parsing") raise e class itunesHandler(def_handler): def __init__(self): self.url = "" self._in_key = None self._in_value = None self._last_key = None def startElement(self, name, attrs): if name == 'key': self._in_key = "" elif name == 'string': self._in_value = "" def endElement(self, name): if name == 'key': self._last_key = self._in_key self._in_key = None elif name == 'string': if self._last_key == 'feedURL': self.url = self._in_value self._in_value = None def characters(self, ch): if self._in_key is not None: self._in_key += ch elif self._in_value is not None: self._in_value += ch class ItunesError(Exception): def __init__(self, m): self.m = m def __str__(self): return m PenguinTV-4.2.0/penguintv/subProcess.py0000644000000000000000000001047210646750251015015 0ustar # Written by Owen Williams # see LICENSE for license information # Example: # # import subProcess # process = subProcess.subProcess("your shell command") # process.read() #timeout is optional # handle(process.outdata, process.errdata) # del(process) import time, os, select, signal class subProcess: """Class representing a child process. It's like popen2.Popen3 but there are three main differences. 1. This makes the new child process group leader (using setpgrp()) so that all children can be killed. 2. The output function (read) is optionally non blocking returning in specified timeout if nothing is read, or as close to specified timeout as possible if data is read. 3. The output from both stdout & stderr is read (into outdata and errdata). Reading from multiple outputs while not deadlocking is not trivial and is often done in a non robust manner.""" def __init__(self, cmd, bufsize=8192): """The parameter 'cmd' is the shell command to execute in a sub-process. If the 'bufsize' parameter is specified, it specifies the size of the I/O buffers from the child process.""" self.cleaned=False self.BUFSIZ=bufsize self.outr, self.outw = os.pipe() self.errr, self.errw = os.pipe() self.pid = os.fork() if self.pid == 0: self._child(cmd) os.close(self.outw) #parent doesn't write so close os.close(self.errw) # Note we could use self.stdout=fdopen(self.outr) here # to get a higher level file object like popen2.Popen3 uses. # This would have the advantages of auto handling the BUFSIZ # and closing the files when deleted. However it would mean # that it would block waiting for a full BUFSIZ unless we explicitly # set the files non blocking, and there would be extra uneeded # overhead like EOL conversion. So I think it's handier to use os.read() self.outdata = self.errdata = '' self._outeof = self._erreof = 0 def _child(self, cmd): # Note sh below doesn't setup a seperate group (job control) # for non interactive shells (hmm maybe -m option does?) 
os.setpgrp() #seperate group so we can kill it os.dup2(self.outw,1) #stdout to write side of pipe os.dup2(self.errw,2) #stderr to write side of pipe #stdout & stderr connected to pipe, so close all other files map(os.close,[self.outr,self.outw,self.errr,self.errw]) try: cmd = ['/bin/sh', '-c', cmd] os.execvp(cmd[0], cmd) finally: #exit child on error os._exit(1) def read(self, timeout=None): """return 0 when finished else return 1 every timeout seconds data will be in outdata and errdata""" currtime=time.time() while 1: tocheck=[] if not self._outeof: tocheck.append(self.outr) if not self._erreof: tocheck.append(self.errr) ready = select.select(tocheck,[],[],timeout) if len(ready[0]) == 0: #no data timeout return 1 else: if self.outr in ready[0]: outchunk = os.read(self.outr,self.BUFSIZ) if outchunk == '': self._outeof = 1 self.outdata += outchunk if self.errr in ready[0]: errchunk = os.read(self.errr,self.BUFSIZ) if errchunk == '': self._erreof = 1 self.errdata += errchunk if self._outeof and self._erreof: return 0 elif timeout: if (time.time()-currtime) > timeout: return 1 #may be more data but time to go def kill(self): os.kill(-self.pid, signal.SIGTERM) #kill whole group def cleanup(self): """Wait for and return the exit status of the child process.""" self.cleaned=True os.close(self.outr) os.close(self.errr) pid, sts = os.waitpid(self.pid, 0) if pid == self.pid: self.sts = sts return self.sts def __del__(self): if not self.cleaned: self.cleanup() PenguinTV-4.2.0/penguintv/MainWindow.py0000644000000000000000000020757711406151435014751 0ustar import gtk import gtk.glade import gobject import sys, os, os.path import logging try: set except: from sets import Set as set import ptvDB import penguintv import Player import utils import Downloader #from trayicon.SonataNotification import TrayIconTips as tooltips if utils.RUNNING_HILDON: import hildon #status of the main window progress bar U_NOBODY=0 U_DOWNLOAD=1 U_LOADING=2 U_POLL=3 U_STANDARD=4 #states S_DEFAULT = 0 S_MANUAL_SEARCH = 1 S_TAG_SEARCH = 2 S_MAJOR_DB_OPERATION = 3 #filter model F_FAVORITE = 0 F_NAME = 1 F_DISPLAY = 2 F_TYPE = 3 #notebook tabs N_FEEDS = 0 N_PLAYER = 1 N_DOWNLOADS = 2 import FeedList, PlanetView, DownloadView, EntryFormatter if utils.HAS_SEARCH: import FeedFilterPropertiesDialog import AddSearchTagDialog import EditSearchesDialog import FeedFilterDialog if not utils.RUNNING_HILDON: import SynchronizeDialog import EntryList, EntryView if utils.HAS_GSTREAMER: import GStreamerPlayer class MainWindow(gobject.GObject): COLUMN_TITLE = 0 COLUMN_ITEM = 1 COLUMN_BOLD = 2 COLUMN_STICKY_FLAG = 3 __gsignals__ = { 'player-show': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([])), 'player-hide': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([])) } def __init__(self, app, use_internal_player=False, window=None, status_icon=None, playlist=None): gobject.GObject.__init__(self) self._app = app self._window_inited = False self._mm = self._app.mediamanager self._glade_prefix = utils.get_glade_prefix() self._widgetTree = None self._menu_widgettree = None self.window_maximized = False self.changing_layout=False self.layout='standard' self._bar_owner = U_NOBODY self._status_owner = U_NOBODY self._state = S_DEFAULT self._fullscreen = False self._fullscreen_lock = False if playlist is None: self._playlist_filename = os.path.join(utils.get_home(), "gst_playlist.pickle") else: self._playlist_filename = playlist self._use_internal_player = False if utils.HAS_GSTREAMER and use_internal_player: self._use_internal_player = True 
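# --- hedged usage sketch for the subProcess class above, following
# its own module docstring; the shell command and the one-second
# timeout are assumptions ---
def _example_subprocess():
	import subProcess
	proc = subProcess.subProcess("ls -l /")
	while proc.read(1):        # 1 = timed out, 0 = both pipes hit EOF
		pass                   # a UI could repaint here between reads
	print proc.outdata         # accumulated stdout
	print proc.errdata         # accumulated stderr
	return proc.cleanup()      # waitpid() status; also closes the pipes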
self._status_icon = status_icon self._active_filter_name = FeedList.BUILTIN_TAGS[FeedList.ALL] self._active_filter_index = FeedList.ALL self._active_filter_path = (0,) if not utils.RUNNING_SUGAR and not utils.RUNNING_HILDON: pixbuf = gtk.gdk.pixbuf_new_from_file(utils.get_image_path('ev_online.png')) source = gtk.IconSource() source.set_pixbuf(pixbuf) source.set_size(gtk.ICON_SIZE_DIALOG) source.set_size_wildcarded(False) self._connected_iconset = gtk.IconSet() self._connected_iconset.add_source(source) pixbuf = gtk.gdk.pixbuf_new_from_file(utils.get_image_path('ev_offline.png')) source = gtk.IconSource() source.set_pixbuf(pixbuf) source.set_size(gtk.ICON_SIZE_DIALOG) source.set_size_wildcarded(False) self._disconnected_iconset = gtk.IconSet() self._disconnected_iconset.add_source(source) ##other WINDOWS we open if utils.HAS_SEARCH: self._window_add_search = AddSearchTagDialog.AddSearchTagDialog(gtk.glade.XML(os.path.join(self._glade_prefix,'extra_dialogs.glade'), "window_add_search_tag",'penguintv'),self._app) self._feed_filter_properties_dialog = FeedFilterPropertiesDialog.FeedFilterPropertiesDialog(gtk.glade.XML(os.path.join(self._glade_prefix,'extra_dialogs.glade'), "window_filter_properties",'penguintv'),self._app) if not utils.RUNNING_SUGAR and not utils.RUNNING_HILDON: self._sync_dialog = SynchronizeDialog.SynchronizeDialog(os.path.join(self._glade_prefix,'extra_dialogs.glade'), self._app) self._window_add_feed = None self._filter_selector_dialog = None self._feed_properties_dialog = None #signals self._app.connect('feed-added', self.__feed_added_cb) self._app.connect('feed-removed', self.__feed_removed_cb) self._app.connect('feed-polled', self.__feed_polled_cb) self._app.connect('download-finished', self.__download_finished_cb) self._app.connect('setting-changed', self.__setting_changed_cb) self._app.connect('tags-changed', self.__tags_changed_cb) self._app.connect('app-loaded', self.__app_loaded_cb) self._app.connect('online-status-changed', self.__online_status_changed_cb) self._app.connect('state-changed', self.__state_changed_cb) #most of the initialization is done on Show() if utils.RUNNING_SUGAR: gobject.idle_add(self.Show, window) def __link_activated_cb(self, o, link): self._app.activate_link(link) def __feed_clicked_cb(self, o): if utils.RUNNING_HILDON: self.feed_tabs.set_current_page(1) def __entrylistview_list_resized_cb(self, entrylistview, new_width): if self.layout == "widescreen" and self.app_window is not None: listnview_width = self.app_window.get_size()[0] - self.feed_pane.get_position() if listnview_width - new_width < 400: #ie, entry view will be tiny self.entry_pane.set_position(listnview_width-400) #MAGIC NUMBER elif new_width > 20: #MAGIC NUMBER self.entry_pane.set_position(new_width) def __feed_added_cb(self, app, feed_id, success): if success: #HACK: we know it will already be selected #self.select_feed(feed_id) self.display_status_message(_("Feed Added")) gobject.timeout_add(2000, self.display_status_message, "") else: self.display_status_message(_("Error adding feed")) self.select_feed(feed_id) def __feed_polled_cb(self, app, feed_id, update_data): if not update_data.has_key('polling_multiple'): self.display_status_message(_("Feed Updated")) gobject.timeout_add(2000, self.display_status_message, "") def __feed_removed_cb(self, app, feed_id): self.update_filters() def __download_finished_cb(self, app, d): self._download_view.update_downloads() self.update_download_progress() def __setting_changed_cb(self, app, typ, datum, value): if datum == 
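# --- condensed sketch of the two-state connection icon recipe above;
# the path argument is an assumption ---
def _example_iconset_from_file(path):
	import gtk
	pixbuf = gtk.gdk.pixbuf_new_from_file(path)
	source = gtk.IconSource()
	source.set_pixbuf(pixbuf)
	source.set_size(gtk.ICON_SIZE_DIALOG)
	source.set_size_wildcarded(False)
	iconset = gtk.IconSet()
	iconset.add_source(source)
	image = gtk.Image()
	image.set_from_icon_set(iconset, gtk.ICON_SIZE_DIALOG)
	return image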
'/apps/penguintv/show_notifications': show_notifs_item = self._menu_widgettree.get_widget('show_notifications') if show_notifs_item.get_active() != value: show_notifs_item.set_active(value) def __tags_changed_cb(self, app, val): self.update_filters() def __app_loaded_cb(self, app): if utils.RUNNING_SUGAR: self._finish_sugar_toolbar() def __online_status_changed_cb(self, app, connected): if connected: if self._connection_button: #p = utils.get_image_path('ev_online.png') i = gtk.Image() i.set_from_icon_set(self._connected_iconset, gtk.ICON_SIZE_DIALOG) self._connection_button.set_image(i) else: if self._connection_button: #p = utils.get_image_path('ev_offline.png') i = gtk.Image() i.set_from_icon_set(self._disconnected_iconset, gtk.ICON_SIZE_DIALOG) self._connection_button.set_image(i) def update_downloads(self): self._download_view.update_downloads() # def __getitem__(self, key): # return self.widgets.get_widget(key) def Show(self, dock_widget = None): """shows the main window. if given a widget, it will put itself in the widget. otherwise load a regular application window""" #if not utils.HAS_MOZILLA and self.layout.endswith("planet"): # logging.warning("requested planet layout, but can't use because gtkmozembed isn't installed correctly (won't import)") # self.layout = "standard" if utils.RUNNING_SUGAR: #if we are in OLPC mode and just have to supply a widget... self._status_view = None self._disk_usage_widget = None self.app_window = None vbox = gtk.VBox() self._layout_dock = self.load_notebook() self._layout_dock.add(self.load_layout()) vbox.pack_start(self._notebook) self._status_view = MainWindow._my_status_view() vbox.pack_start(self._status_view, False, False) dock_widget.set_canvas(vbox) dock_widget.show_all() self.window = dock_widget self._connection_button = None #TODO fixme self._widgetTree = gtk.glade.XML(os.path.join(self._glade_prefix,'..','penguintv.glade'), 'toolbar_holder','penguintv') self.toolbar = self._load_sugar_toolbar() self.toolbar.show() for key in dir(self.__class__): #python insaneness if key[:3] == 'on_': self._widgetTree.signal_connect(key, getattr(self, key)) self.window.connect('key_press_event', self.on_app_key_press_event) elif utils.RUNNING_HILDON: logging.debug("Hildon: setting up UI") self._h_app = hildon.Program() self.window = hildon.Window() #self.window.set_title("PenguinTV "+utils.VERSION) self.window.set_wmclass("penguintv","penguintv") try: gtk.set_application_name("PenguinTV "+utils.VERSION) except: logging.warning("set application name failed, old pymaemo?") self.window.set_icon_from_file(utils.get_image_path('penguintvicon.png')) self._status_view = None self._disk_usage_widget = None self.app_window = None vbox = gtk.VBox() self._layout_dock = self.load_notebook() self._layout_dock.add(self.load_layout()) self._status_view = MainWindow._my_status_view() self._connection_button = None logging.debug("Hildon: getting toolbar") self._widgetTree = gtk.glade.XML(os.path.join(self._glade_prefix,'hildon.glade'), 'hildon_toolbar_holder','penguintv') self.toolbar = self._load_toolbar() self.toolbar.unparent() vbox.pack_start(self._notebook) vbox.pack_start(self._status_view, False, False) self.window.add(vbox) self.window.add_toolbar(self.toolbar) self._h_app.add_window(self.window) self._menu_widgettree = gtk.glade.XML(os.path.join(self._glade_prefix,'hildon.glade'), 'hildon_menu','penguintv') menu = self._build_hildon_menu(self._menu_widgettree) self.window.set_menu(menu) show_notifs_item = self._menu_widgettree.get_widget('show_notifications') 
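# --- sketch of the transient-status idiom used by the feed callbacks
# above: gobject.timeout_add() reruns its callback until it returns a
# falsy value, so a one-shot "clear" is just a callback that returns
# False.  The label here is illustrative. ---
def _example_flash(label, message):
	import gobject
	label.set_text(message)

	def clear():
		label.set_text("")
		return False           # one-shot: do not reschedule
	gobject.timeout_add(2000, clear)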
show_notifs_item.set_active(self._app.db.get_setting(ptvDB.BOOL, '/apps/penguintv/show_notifications', True)) self.window.show_all() if not utils.HAS_STATUS_ICON: show_notifs_item.hide() for key in dir(self.__class__): #python insaneness if key[:3] == 'on_': self._widgetTree.signal_connect(key, getattr(self, key)) self._menu_widgettree.signal_connect(key, getattr(self, key)) self.window.connect('destroy', self.on_app_destroy_event) self.window.connect('delete-event', self.on_app_delete_event) self.window.connect('key_press_event', self.on_app_key_press_event) else: #if we are loading in a regular window... self._load_app_window() if not utils.HAS_SEARCH: #remove UI elements that don't apply without search self._widgetTree.get_widget('saved_searches').hide() self._widgetTree.get_widget('separator11').hide() self._widgetTree.get_widget('reindex_searches').hide() self._widgetTree.get_widget('add_feed_filter').hide() #if not utils.HAS_MOZILLA: #self._widgetTree.get_widget('planet_layout').hide() if not utils.HAS_STATUS_ICON: self._widgetTree.get_widget('show_notifications').hide() self.window = self.app_window if self._use_internal_player: #From Sonata/dbus_plugin.py if utils.HAS_DBUS: import dbus try: bus = dbus.SessionBus() dbusObj = bus.get_object('org.freedesktop.DBus', '/org/freedesktop/DBus') dbusInterface = dbus.Interface(dbusObj, 'org.freedesktop.DBus') if dbusInterface.NameHasOwner('org.gnome.SettingsDaemon'): try: # mmkeys for gnome 2.22+ settingsDaemonObj = bus.get_object('org.gnome.SettingsDaemon', '/org/gnome/SettingsDaemon/MediaKeys') settingsDaemonInterface = dbus.Interface(settingsDaemonObj, 'org.gnome.SettingsDaemon.MediaKeys') settingsDaemonInterface.GrabMediaPlayerKeys('PenguinTV', 0) except: # mmkeys for gnome 2.18+ settingsDaemonObj = bus.get_object('org.gnome.SettingsDaemon', '/org/gnome/SettingsDaemon') settingsDaemonInterface = dbus.Interface(settingsDaemonObj, 'org.gnome.SettingsDaemon') settingsDaemonInterface.GrabMediaPlayerKeys('PenguinTV', 0) settingsDaemonInterface.connect_to_signal('MediaPlayerKeyPressed', lambda app, key:self._dbus_mmkeys_cb(app, key)) except: pass else: try: import mmkeys keys = mmkeys.MmKeys() keys.connect("mm_prev", lambda x,y: self._gstreamer_player.prev()) keys.connect("mm_next", lambda x,y: self._gstreamer_player.next()) keys.connect("mm_playpause", lambda x,y: self._gstreamer_player.play_pause_toggle()) keys.connect("mm_stop", lambda x,y: self._gstreamer_player.stop()) except: logging.debug("Multimedia Key Support not found") self._notebook.show_only(N_FEEDS) if not utils.HAS_SEARCH: self.search_container.hide_all() if utils.RUNNING_HILDON: self._layout_components.get_widget('filter_label').hide() self._filter_unread_checkbox.hide() #if not utils.USE_TAGGING: # self._filter_container.hide_all() if self._use_internal_player: if self._gstreamer_player.get_queue_count() > 0: self._notebook.show_page(N_PLAYER) self.emit('player-show') if not self._window_inited: gobject.idle_add(self._app.post_show_init) self._window_inited = True return False def _dbus_mmkeys_cb(self, app, key): if app == 'PenguinTV': if key in ("Play", "PlayPause", "Pause"): key = "PlayPause" self._app.player.control_internal(key) def _build_hildon_menu(self, widgets): menu = gtk.Menu() menuitem = widgets.get_widget('file_menu') menuitem.unparent() menu.append(menuitem) menuitem = widgets.get_widget('edit_menu') menuitem.unparent() menu.append(menuitem) menuitem = widgets.get_widget('view_menu') menuitem.unparent() menu.append(menuitem) menuitem = 
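# --- condensed sketch of the GNOME 2.22+ media-key grab above; the
# bus name, object path and interface names come from the code itself,
# the handler is illustrative ---
def _example_grab_media_keys():
	import dbus

	def on_key(app, key):
		if app == 'PenguinTV':
			print "media key:", key

	bus = dbus.SessionBus()
	obj = bus.get_object('org.gnome.SettingsDaemon',
						 '/org/gnome/SettingsDaemon/MediaKeys')
	iface = dbus.Interface(obj, 'org.gnome.SettingsDaemon.MediaKeys')
	iface.GrabMediaPlayerKeys('PenguinTV', 0)
	iface.connect_to_signal('MediaPlayerKeyPressed', on_key)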
widgets.get_widget('go_menu') menuitem.unparent() menu.append(menuitem) menuitem = widgets.get_widget('feed_menu') menuitem.unparent() menu.append(menuitem) menuitem = widgets.get_widget('help_menu') menuitem.unparent() menu.append(menuitem) separator = gtk.SeparatorMenuItem() menu.append(separator) item = gtk.ImageMenuItem('gtk-close') item.connect('activate', self.on_app_delete_event) menu.append(item) return menu def _load_toolbar(self): toolbar = self._widgetTree.get_widget('toolbar1') #set up separator (see below) vseparator = self._widgetTree.get_widget('vseparator1') vseparator_toolitem = self._widgetTree.get_widget('toolitem1') vseparator_toolitem.set_expand(True) vseparator.set_draw(False) self._disk_usage_widget = self._widgetTree.get_widget('disk_usage') self._disk_usage_widget.set_use_markup(True) return toolbar def _load_sugar_toolbar(self): from sugar.graphics.toolbutton import ToolButton from sugar.graphics.palette import Palette toolbar = gtk.Toolbar() # Add Feed Palette (initialized later when the dialogs are set up) self.sugar_add_button = ToolButton('gtk-add') toolbar.insert(self.sugar_add_button, -1) self.sugar_add_button.show() # Remove Feed self._sugar_remove_button = ToolButton('gtk-remove') vbox = gtk.VBox() #vbox.set_size_request(300, 200) label = gtk.Label(_('Really remove feed?')) vbox.pack_start(label) hbox = gtk.HBox() expander_label = gtk.Label(' ') hbox.pack_start(expander_label) b = gtk.Button('gtk-remove') b.set_use_stock(True) b.connect('clicked', self.on_remove_feed_activate, True) hbox.pack_start(b, False) vbox.pack_start(hbox) palette = Palette(_('Remove Feed')) palette.set_content(vbox) vbox.show_all() self._sugar_remove_button.set_palette(palette) toolbar.insert(self._sugar_remove_button, -1) self._sugar_remove_button.show() # Refresh Feeds b = gtk.ToolButton('gtk-refresh') b.connect('clicked', self.on_feeds_poll_clicked) toolbar.insert(b, -1) b.show() # Download Media b = gtk.ToolButton('gtk-go-down') b.connect('clicked', self.on_download_unviewed_clicked) toolbar.insert(b, -1) b.show() # Separator sep = gtk.SeparatorToolItem() toolbar.insert(sep, -1) sep.show() # Preferences self._sugar_prefs_button = ToolButton('gtk-preferences') toolbar.insert(self._sugar_prefs_button, -1) self._sugar_prefs_button.show() return toolbar def _finish_sugar_toolbar(self): from sugar.graphics.toolbutton import ToolButton from sugar.graphics.palette import Palette if self._window_add_feed is None: self.show_window_add_feed() self.hide_window_add_feed() content = self._window_add_feed.extract_content() palette = Palette(_('Add Feed')) palette.set_content(content) self.sugar_add_button.set_palette(palette) content = self._app.window_preferences.extract_content() palette = Palette(_('Preferences')) palette.set_content(content) self._sugar_prefs_button.set_palette(palette) class _my_status_view(gtk.HBox): def __init__(self, homogeneous=False, spacing=0): gtk.HBox.__init__(self, homogeneous=False, spacing=0) self._progress = gtk.ProgressBar() sep = gtk.VSeparator() self._status = gtk.Label() self.pack_start(self._progress, False, False) self.pack_start(sep, False, False) self.pack_start(self._status, False, False) def get_status(self): return self._status def set_status(self, m): self._status.set_text(m) if utils.RUNNING_HILDON: if m == "": self._status.hide() else: self._status.show() def set_progress_percentage(self, p): self._progress.set_fraction(p) if utils.RUNNING_HILDON: if p == 0.0: self._progress.hide() else: self._progress.show() def get_progress_percentage(self): 
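# --- pared-down sketch of the palette-as-confirmation pattern used by
# the Sugar remove button above (Sugar-only); on_confirm is an assumed
# callback ---
def _example_palette_button(on_confirm):
	import gtk
	from sugar.graphics.toolbutton import ToolButton
	from sugar.graphics.palette import Palette

	button = ToolButton('gtk-remove')
	box = gtk.VBox()
	box.pack_start(gtk.Label('Really remove feed?'))
	b = gtk.Button('gtk-remove')
	b.set_use_stock(True)
	b.connect('clicked', on_confirm)
	box.pack_start(b, False)
	palette = Palette('Remove Feed')
	palette.set_content(box)
	box.show_all()
	button.set_palette(palette)
	return button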
return self._progress.get_fraction() def _load_app_window(self): self._widgetTree = gtk.glade.XML(os.path.join(self._glade_prefix,'desktop.glade'), 'app','penguintv') self._menu_widgettree = self._widgetTree notebook_dock = self._widgetTree.get_widget('layout_dock') self.app_window = self._widgetTree.get_widget('app') fancy_feedlist_item = self._menu_widgettree.get_widget('fancy_feed_display') fancy_feedlist_item.set_active(self._app.db.get_setting(ptvDB.BOOL, '/apps/penguintv/fancy_feedlist', True)) show_notifs_item = self._menu_widgettree.get_widget('show_notifications') show_notifs_item.set_active(self._app.db.get_setting(ptvDB.BOOL, '/apps/penguintv/show_notifications', True)) self._widgetTree.get_widget(self.layout+"_layout").set_active(True) self.app_window.set_icon_from_file(utils.get_image_path('penguintvicon.png')) #status_box = self._widgetTree.get_widget("status_hbox") #self._status_view = MainWindow._my_status_view() #status_box.pack_start(self._status_view) self._status_view = self._widgetTree.get_widget('appbar') self._load_toolbar() self._connection_button = self._widgetTree.get_widget('connection_button') #p = utils.get_image_path('ev_online.png') i = gtk.Image() i.set_from_icon_set(self._connected_iconset, gtk.ICON_SIZE_DIALOG) self._connection_button.set_image(i) #load the layout self._layout_dock = self.load_notebook() notebook_dock.add(self._notebook) self._layout_dock.add(self.load_layout()) #sizing for the window comes from gconf x = self._app.db.get_setting(ptvDB.INT, '/apps/penguintv/app_window_position_x', 40) y = self._app.db.get_setting(ptvDB.INT, '/apps/penguintv/app_window_position_y', 40) if x < 0: x = 0 if y < 0: y = 0 self.app_window.move(x,y) w = self._app.db.get_setting(ptvDB.INT, '/apps/penguintv/app_window_size_x', 800) h = self._app.db.get_setting(ptvDB.INT, '/apps/penguintv/app_window_size_y', 500) def do_maximize(): self.app_window.maximize() self.window_maximized = True return False if w<0 or h<0: #very cheesy. 
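# --- minimal restatement of the _my_status_view composite-widget
# shape above: subclass gtk.HBox, build the children in __init__, and
# expose a small API ---
import gtk as _example_gtk

class _ExampleStatusView(_example_gtk.HBox):
	def __init__(self):
		_example_gtk.HBox.__init__(self)
		self._progress = _example_gtk.ProgressBar()
		self._label = _example_gtk.Label()
		self.pack_start(self._progress, False, False)
		self.pack_start(_example_gtk.VSeparator(), False, False)
		self.pack_start(self._label, False, False)

	def set_status(self, message):
		self._label.set_text(message)

	def set_progress_percentage(self, fraction):
		self._progress.set_fraction(fraction)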
negative values really means "maximize" self.app_window.resize(abs(w),abs(h)) #but be good and don't make assumptions about negativity gobject.idle_add(do_maximize) else: self.app_window.resize(w,h) if self.layout.endswith("planet"): self._menu_widgettree.get_widget('entry_menu_item').hide() else: self._menu_widgettree.get_widget('entry_menu_item').show() self.app_window.show_all() for key in dir(self.__class__): #python insaneness if key[:3] == 'on_': self._widgetTree.signal_connect(key, getattr(self, key)) def show_window_add_feed(self, autolocation=True): import AddFeedDialog if self._window_add_feed is None: if utils.RUNNING_HILDON: self._window_add_feed = AddFeedDialog.AddFeedDialog(gtk.glade.XML(os.path.join(self._glade_prefix,'hildon_dialog_add_feed.glade'), "window_add_feed",'penguintv'),self._app) #MAGIC else: self._window_add_feed = AddFeedDialog.AddFeedDialog(gtk.glade.XML(os.path.join(self._glade_prefix,'dialog_add_feed.glade'), "window_add_feed",'penguintv'),self._app) #MAGIC self._window_add_feed.show(autolocation) def hide_window_add_feed(self): if self._window_add_feed is None: self.show_window_add_feed() self._window_add_feed.hide() def set_window_add_feed_location(self, url): assert self._window_add_feed is not None self._window_add_feed.set_location(url) def load_notebook(self): self._notebook = NotebookManager() self._notebook.set_property('tab-border',0) if utils.RUNNING_HILDON: label = gtk.Label(_('Feeds')) else: label = gtk.Label(_('Feeds')) label.set_property('use-markup',True) vbox = gtk.VBox() self._notebook.append_page(vbox, label) p_vbox = gtk.VBox() if self._use_internal_player: self._gstreamer_player = GStreamerPlayer.GStreamerPlayer(p_vbox, self._playlist_filename, tick_interval=5) self._gstreamer_player.connect('item-queued', self._on_player_item_queued) self._gstreamer_player.connect('items-removed', self._on_player_items_removed) self._gstreamer_player.Show() self.emit('player-show') if utils.RUNNING_HILDON: self._player_label = gtk.Label(_('Player')) else: self._player_label = gtk.Label(''+_('Player')+'') self._player_label.set_property('use-markup',True) self._notebook.append_page(p_vbox, self._player_label) if utils.RUNNING_HILDON: self._downloads_label = gtk.Label(_('Downloads')) else: self._downloads_label = gtk.Label(_('Downloads')) self._downloads_label.set_property('use-markup',True) self._download_view = DownloadView.DownloadView(self._app, self._mm, self._app.db, os.path.join(self._glade_prefix,'dialogs.glade')) self._notebook.append_page(self._download_view.get_widget(), self._downloads_label) #self._notebook.set_show_tabs(False) self._notebook.set_property('show-border', False) self._notebook.connect('realize', self._on_notebook_realized) self._notebook.connect('switch-page', self._on_notebook_page_selected) self._notebook.show_all() return vbox def get_gst_player(self): try: return self._gstreamer_player except: logging.warning("no gstreamer player to get") return None def notebook_select_page(self, page): self._notebook.set_current_page(page) def load_layout(self): components = gtk.glade.XML(os.path.join(self._glade_prefix,self.layout+'.glade'), self.layout+'_layout_container','penguintv') #MAGIC self._layout_components = components self._layout_container = components.get_widget(self.layout+'_layout_container') #dock_widget.add(self._layout_container) fancy = self._app.db.get_setting(ptvDB.BOOL, '/apps/penguintv/fancy_feedlist', True) if utils.RUNNING_SUGAR: fancy = False elif utils.RUNNING_HILDON: fancy = True self.feed_list_view = 
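# --- sketch of the sign trick used above: a negative stored size
# means "maximize".  The restore half mirrors the code; the save half
# is inferred, not shown in this file ---
def _example_save_geometry(db, window, maximized):
	import ptvDB
	w, h = window.get_size()
	if maximized:
		w, h = -w, -h                      # encode maximized in the sign
	db.set_setting(ptvDB.INT, '/apps/penguintv/app_window_size_x', w)
	db.set_setting(ptvDB.INT, '/apps/penguintv/app_window_size_y', h)

def _example_restore_geometry(db, window):
	import gobject, ptvDB
	w = db.get_setting(ptvDB.INT, '/apps/penguintv/app_window_size_x', 800)
	h = db.get_setting(ptvDB.INT, '/apps/penguintv/app_window_size_y', 500)
	window.resize(abs(w), abs(h))          # never pass a negative size
	if w < 0 or h < 0:
		gobject.idle_add(window.maximize)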
FeedList.FeedList(components,self._app, fancy) #renderer = EntryFormatter.MOZILLA #renderer = EntryFormatter.GTKHTML if utils.RUNNING_HILDON: renderer = EntryFormatter.GTKHTML elif utils.HAS_MOZILLA: renderer = EntryFormatter.MOZILLA else: logging.warning("Mozilla (gtkmozembed) not found, falling back to GTKHTML") renderer = EntryFormatter.GTKHTML # #if self.layout.endswith("planet") and renderer != EntryFormatter.MOZILLA: # self.layout = "standard" # return self.load_layout() if not self.layout.endswith("planet"): self.entry_list_view = EntryList.EntryList(components, self._app, self.feed_list_view, self) self.entry_view = EntryView.EntryView(components, self.feed_list_view, self.entry_list_view, self._app, self, renderer) else: #self.entry_view = PlanetView.PlanetView(components, self.feed_list_view, # self._app, self, self._app.db, renderer) self.entry_view = PlanetView.PlanetView(components.get_widget('html_dock'), self, self._app.db, utils.get_share_prefix(), self.feed_list_view, self._app, renderer) self.entry_list_view = self.entry_view for key in dir(self.__class__): #python insaneness if key[:3] == 'on_': components.signal_connect(key, getattr(self, key)) #some more signals self.feed_list_view.connect('link-activated', self.__link_activated_cb) self.feed_list_view.connect('feed-clicked', self.__feed_clicked_cb) if not self.layout.endswith("planet"): self.entry_list_view.connect('entrylist-resized', self.__entrylistview_list_resized_cb) #if we connected this in planetview, we'd activate links twice self.entry_list_view.connect('link-activated', self.__link_activated_cb) self.entry_view.connect('link-activated', self.__link_activated_cb) #major WIDGETS if not utils.RUNNING_HILDON: self.feed_pane = components.get_widget('feed_pane') else: self.feed_tabs = components.get_widget('feed_tabs') self.feed_tabs.set_current_page(0) self.feed_pane = None self._feedlist = components.get_widget('feedlistview') if self.layout.endswith("planet"): self.entry_pane = self.feed_pane #cheat else: self.entry_pane = components.get_widget('entry_pane') self._filter_container = components.get_widget('filter_container') self._filter_unread_checkbox = components.get_widget('unread_filter') self._filter_tree = gtk.TreeStore(str, #filter displayable str, #filter name int, #seperator bool) #visible eventbox = components.get_widget('filter_selector_eventbox') self._filter_selector_combo = gtk.ComboBox(self._filter_tree) cell = gtk.CellRendererText() if utils.RUNNING_HILDON: cell.set_property("size-points", 24) self._filter_selector_combo.pack_start(cell, True) self._filter_selector_combo.add_attribute(cell, 'text', 0) self._filter_selector_combo.add_attribute(cell, 'sensitive', 3) self._filter_selector_combo.connect('changed', self.on_filter_changed) eventbox.add(self._filter_selector_combo) self._filter_selector_combo.set_model(self._filter_tree) self._filter_selector_combo.set_row_separator_func(lambda model,iter:model[iter][2]==1) self._filters = [] #text, text to display, type, tree path self._favorite_filters = [] #text, text to display, type self.search_entry = components.get_widget('search_entry') completion = gtk.EntryCompletion() completion_model = gtk.ListStore(str, str, int) #name, display, index completion.set_model(completion_model) renderer = gtk.CellRendererText() completion.pack_start(renderer) completion.add_attribute(renderer, 'text', 1) def match_func(comp, string, iter): try: return comp.get_model()[iter][0].upper().startswith(string.upper()) except: return False 
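# --- skeleton of the introspective autoconnect loop used above (the
# "python insaneness" comment): any method named on_* is registered as
# a glade signal handler under its own name, so handler names in the
# glade file only have to match the method names ---
class _ExampleHandlers(object):
	def on_quit_activate(self, widget):
		import gtk
		gtk.main_quit()

	def connect_all(self, widget_tree):
		# widget_tree is a gtk.glade.XML instance
		for key in dir(self.__class__):
			if key[:3] == 'on_':
				widget_tree.signal_connect(key, getattr(self, key))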
completion.set_match_func(match_func) #completion.set_text_column(0) completion.connect('match-selected',self._on_completion_match_selected, 2) self.search_entry.set_completion(completion) self.search_container = components.get_widget('search_container') self.update_filters() #dnd self._TARGET_TYPE_TEXT = 80 self._TARGET_TYPE_URL = 81 drop_types = [ ('text/x-moz-url',0,self._TARGET_TYPE_URL), ('text/unicode',0,self._TARGET_TYPE_TEXT), ('text/plain',0,self._TARGET_TYPE_TEXT)] self._feedlist.drag_dest_set(gtk.DEST_DEFAULT_ALL, drop_types, gtk.gdk.ACTION_COPY) if not self.layout.endswith("planet"): val = self._app.db.get_setting(ptvDB.INT, '/apps/penguintv/entry_pane_position', 370) if val < 10: val = 50 self.entry_pane.set_position(val) if not utils.RUNNING_HILDON: f_p_default = 370 val = self._app.db.get_setting(ptvDB.INT, '/apps/penguintv/feed_pane_position', f_p_default) if val < 10: val=50 if self.feed_pane is not None: self.feed_pane.connect('realize', self._on_feed_pane_realized, val) if not self.changing_layout: self.set_active_filter(FeedList.ALL) val = self._app.db.get_setting(ptvDB.STRING, '/apps/penguintv/default_filter') if val is not None: try: filter_index = [row[F_NAME] for row in self._filters].index(val) cur_filter = self._filters[filter_index] if utils.HAS_SEARCH: if cur_filter[F_TYPE] == ptvDB.T_SEARCH or filter_index==FeedList.SEARCH: self.set_active_filter(FeedList.ALL) else: self.set_active_filter(filter_index) else: self.set_active_filter(filter_index) except ValueError: #didn't find the item in the model (.index(val) fails) self.set_active_filter(FeedList.ALL) else: self.set_active_filter(FeedList.ALL) else: self.set_active_filter(self._active_filter_index) #sys.stderr.write("done") return self._layout_container def Hide(self): if self.app_window: self.app_window.hide() del self._widgetTree del self.feed_list_view del self.entry_list_view del self.entry_view #some widgets if self.feed_pane is not None: del self.feed_pane del self._feedlist if not self.layout.endswith("planet"): del self.entry_pane del self.app_window del self._status_view del self._disk_usage_widget def get_parent(self): return self.window def on_toggle_fullscreen_activate(self, event=None): self.toggle_fullscreen() def toggle_fullscreen(self): #don't fullscreen under these exceptions if self._notebook.get_current_page() == N_PLAYER: assert self._gstreamer_player is not None #if self._notebook.get_current_page() == N_DOWNLOADS: # return #maemo throws X Window System errors when doing this -- ignore them #http://labs.morpheuz.eng.br/blog/14/08/2007/xv-and-mplayer-on-maemo/ if utils.RUNNING_HILDON: gtk.gdk.error_trap_push() self._fullscreen = not self._fullscreen if self._fullscreen: self._do_fullscreen() else: self._do_unfullscreen() if utils.RUNNING_HILDON: while gtk.events_pending(): gtk.main_iteration() gtk.gdk.flush() gtk.gdk.error_trap_pop() def _do_fullscreen(self): if self._notebook.get_current_page() == N_PLAYER: pixmap = gtk.gdk.Pixmap(None, 1, 1, 1) color = gtk.gdk.Color() cursor = gtk.gdk.Cursor(pixmap, pixmap, color, color, 0, 0) self.window.window.set_cursor(cursor) if self._use_internal_player: if self._gstreamer_player: self._gstreamer_player.toggle_controls(True) if utils.HAS_SEARCH: self.search_container.hide_all() #elif self._notebook.get_current_page() == N_FEEDS: if self.feed_pane is not None: self._app.db.set_setting(ptvDB.INT, '/apps/penguintv/feed_pane_position', self.feed_pane.get_position()) if self.entry_pane is not None: self._app.db.set_setting(ptvDB.INT, 
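# --- minimal reconstruction of the completion setup above: a custom
# (name, display, index) model with a case-insensitive prefix match
# against the name column ---
def _example_completion(entry):
	import gtk
	completion = gtk.EntryCompletion()
	model = gtk.ListStore(str, str, int)       # name, display, index
	completion.set_model(model)
	cell = gtk.CellRendererText()
	completion.pack_start(cell)
	completion.add_attribute(cell, 'text', 1)  # render the display column

	def match_func(comp, key, it):
		name = comp.get_model()[it][0]
		return name is not None and name.upper().startswith(key.upper())

	completion.set_match_func(match_func)
	entry.set_completion(completion)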
'/apps/penguintv/entry_pane_position', self.entry_pane.get_position()) if not utils.RUNNING_HILDON: if self.layout.endswith('planet'): self.entry_pane.set_position(0) else: self.feed_pane.set_position(0) self._notebook.set_keep_hidden(True) self._widgetTree.get_widget('toolbar1').hide() if utils.RUNNING_SUGAR: self._status_view.hide() elif utils.RUNNING_HILDON: self.window.fullscreen() else: self._widgetTree.get_widget('menubar2').hide() self._widgetTree.get_widget('status_hbox').hide() self._filter_container.hide_all() self.app_window.fullscreen() def _do_unfullscreen(self): if self._fullscreen_lock: return self._fullscreen_lock = True #if self._notebook.get_current_page() == N_PLAYER: self.window.window.set_cursor(None) if self._use_internal_player: if self._gstreamer_player is not None: self._gstreamer_player.toggle_controls(False) if utils.HAS_SEARCH: self.search_container.show_all() #elif self._notebook.get_current_page() == N_FEEDS: if not utils.RUNNING_HILDON: if self.layout.endswith('planet'): val = self._app.db.get_setting(ptvDB.INT, '/apps/penguintv/entry_pane_position', 370) self.entry_pane.set_position(val) else: val = self._app.db.get_setting(ptvDB.INT, '/apps/penguintv/feed_pane_position', 370) self.feed_pane.set_position(val) self._notebook.set_keep_hidden(False) #don't show the toolbar if we are on hildon and we are in the player pagenum = self._notebook.get_current_page() if not utils.RUNNING_HILDON or not pagenum == N_PLAYER: self._widgetTree.get_widget('toolbar1').show_all() def _unfullscreen_finish(): self.app_window.unfullscreen() self._fullscreen_lock = False return False if utils.RUNNING_SUGAR: self._status_view.show() self._fullscreen_lock = False elif utils.RUNNING_HILDON: self.window.unfullscreen() self._fullscreen_lock = False else: self._widgetTree.get_widget('menubar2').show_all() self._widgetTree.get_widget('status_hbox').show_all() self._filter_container.show_all() gobject.idle_add(_unfullscreen_finish) def on_about_activate(self,event): widgets = gtk.glade.XML(os.path.join(self._glade_prefix,'dialogs.glade'), "aboutdialog1",'penguintv') about_box = widgets.get_widget('aboutdialog1') about_box.set_name('PenguinTV') about_box.set_version(utils.VERSION) about_box.connect('response', self.on_about_response) about_box.show_all() def on_about_response(self, widget, event): widget.destroy() def on_app_delete_event(self, event, data=None): self._app.do_quit() def gtkquit(): if not self._app.is_quit_complete(): return True logging.debug('hildon main_quit') gtk.main_quit() return False if utils.RUNNING_HILDON: gobject.timeout_add(250, gtkquit) return self.window.hide_on_delete() def on_app_destroy_event(self,event,data=None): if utils.RUNNING_HILDON: gtk.main_quit() else: self._app.do_quit() def on_app_window_state_event(self, client, event): if event.new_window_state & gtk.gdk.WINDOW_STATE_MAXIMIZED: self.window_maximized = True elif event.new_window_state & gtk.gdk.WINDOW_STATE_MAXIMIZED == 0: self.window_maximized = False def on_add_feed_activate(self, event=None): if self._state == S_MAJOR_DB_OPERATION: logging.warning("Please wait until feeds have loaded before adding a new one") return self._notebook.set_current_page(N_FEEDS) self.show_window_add_feed() #not modal / blocking def on_add_feed_filter_activate(self,event): selected = self.feed_list_view.get_selected() if selected: title = self._app.db.get_feed_title(selected) dialog = FeedFilterDialog.FeedFilterDialog(gtk.glade.XML(os.path.join(self._glade_prefix,'extra_dialogs.glade'), 
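# --- generic restatement of the deferred-quit idiom above: a 250 ms
# timeout keeps returning True (reschedule) until shutdown work has
# drained, then quits and returns False to cancel itself.  is_done is
# an assumed callable ---
def _example_quit_when_done(is_done):
	import gobject, gtk

	def poll():
		if not is_done():
			return True        # not finished: poll again in 250 ms
		gtk.main_quit()
		return False           # finished: remove the timeout
	gobject.timeout_add(250, poll)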
"window_feed_filter",'penguintv'),self._app) dialog.show() dialog.set_pointed_feed(selected,title) d = { 'title':title } dialog.set_filter_name(_("%(title)s Filtered" % d)) else: dialog = gtk.Dialog(title=_("No Feed Selected"), parent=None, flags=gtk.DIALOG_MODAL, buttons=(gtk.STOCK_OK, gtk.RESPONSE_ACCEPT)) label = gtk.Label(_("Please select the feed you would like to filter")) dialog.vbox.pack_start(label, True, True, 0) label.show() dialog.set_transient_for(self._app.main_window.get_parent()) response = dialog.run() dialog.hide() del dialog def on_connection_button_clicked(self, event): self._app.toggle_net_connection() def pane_to_feeds(self): self.feed_tabs.set_current_page(0) def on_feed_add_clicked(self, event): if self._state == S_MAJOR_DB_OPERATION: logging.warning("Please wait until feeds have loaded before adding a new one") return self.show_window_add_feed() #not modal / blocking #def on_feed_pane_expose_event(self, widget, event): # self.feed_list_view.resize_columns(self.feed_pane.get_position()) def on_feed_properties_activate(self, event): import FeedPropertiesDialog selected = self.feed_list_view.get_selected() if selected: #title, description, url, link feed_info = self._app.db.get_feed_info(selected) if self._feed_properties_dialog is None: if utils.RUNNING_HILDON: self._feed_properties_dialog = FeedPropertiesDialog.FeedPropertiesDialog(gtk.glade.XML(os.path.join(self._glade_prefix,'hildon_dialogs.glade'), "window_feed_properties",'penguintv'),self._app) else: self._feed_properties_dialog = FeedPropertiesDialog.FeedPropertiesDialog(gtk.glade.XML(os.path.join(self._glade_prefix,'dialogs.glade'), "window_feed_properties",'penguintv'),self._app) self._feed_properties_dialog.set_feedid(selected) self._feed_properties_dialog.set_title(feed_info['title']) self._feed_properties_dialog.set_rss(feed_info['url']) self._feed_properties_dialog.set_description(feed_info['description']) self._feed_properties_dialog.set_link(feed_info['link']) self._feed_properties_dialog.set_last_poll(feed_info['lastpoll']) self._feed_properties_dialog.set_tags(self._app.db.get_tags_for_feed(selected)) self._feed_properties_dialog.set_flags(self._app.db.get_flags_for_feed(selected)) if self._app.feed_refresh_method == penguintv.REFRESH_AUTO: self._feed_properties_dialog.set_next_poll(feed_info['lastpoll']+feed_info['pollfreq']) else: self._feed_properties_dialog.set_next_poll(feed_info['lastpoll']+self._app.polling_frequency) self._feed_properties_dialog.show() def on_feed_filter_properties_activate(self, event): selected = self.feed_list_view.get_selected() if selected: #title, description, url, link feed_info = self._app.db.get_feed_info(selected) self._feed_filter_properties_dialog.set_feed_id(selected) self._feed_filter_properties_dialog.set_pointed_feed_id(feed_info['feed_pointer']) self._feed_filter_properties_dialog.set_filter_name(feed_info['title']) self._feed_filter_properties_dialog.set_query(feed_info['description']) self._feed_filter_properties_dialog.show() def on_download_entry_activate(self, event): entry = self.entry_list_view.get_selected()['entry_id'] self._app.download_entry(entry) def on_download_unviewed_activate(self, event): self._app.download_unviewed() def on_download_unviewed_clicked(self,event): self._app.download_unviewed() def on_delete_entry_media_activate(self,event): selected = self.entry_list_view.get_selected()['entry_id'] self._app.delete_entry_media(selected) def on_delete_feed_media_activate(self,event): selected = self.feed_list_view.get_selected() if selected: 
self._app.delete_feed_media(selected) def on_edit_tags_for_all_activate(self, event): """Bring up mass tag creation window""" import TagEditorNG window_edit_tags_multi = TagEditorNG.TagEditorNG(gtk.glade.XML(os.path.join(self._glade_prefix,'dialogs.glade'), "dialog_tag_editor_ng",'penguintv'), self._app) window_edit_tags_multi.show() def on_export_opml_activate(self, event): self._app.export_opml() def _on_feed_pane_realized(self, widget, val): widget.set_position(val) def on_feedlistview_drag_data_received(self, widget, context, x, y, selection, targetType, time): widget.emit_stop_by_name('drag-data-received') if targetType == self._TARGET_TYPE_TEXT: url = "" for c in selection.data: if c != "\0": #for some reason ever other character is a null. what gives? url = url+c if url.split(':')[0] == 'feed': url = url[url.find(':')+1:] self._app.add_feed(url) elif targetType == self._TARGET_TYPE_URL: url = "" for c in selection.data[0:selection.data.find('\n')]: if c != '\0': url = url+c if url.split(':')[0] == 'feed': #stupid wordpress does 'feed:http://url.com/whatever' url = url[url.find(':')+1:] self._app.add_feed(url, url) def on_feeds_poll_clicked(self,event): self._app.poll_feeds() def set_hide_entries_menuitem(self, state): self._menu_widgettree.get_widget('hide_viewed_entries_cb').set_active(state) def set_hide_entries_visibility(self, state): if state: self._menu_widgettree.get_widget('hide_viewed_entries_cb').show() else: self._menu_widgettree.get_widget('hide_viewed_entries_cb').hide() def on_hide_entries_cb_toggled(self, event): self.entry_list_view.set_hide_viewed(self._menu_widgettree.get_widget('hide_viewed_entries_cb').get_active()) def on_hide_feeds_cb_toggled(self, checkbox): status = checkbox.get_active() self.feed_list_view.set_unread_toggle(status) self._menu_widgettree.get_widget('hide_viewed_feeds_cb').set_active(status) self._filter_unread_checkbox.set_active(status) def on_synchronize_button_clicked(self,event): self._sync_dialog.hide() self._sync_dialog.on_sync_button_clicked(event) def on_edit_favorite_tags(self, o=None): import FilterSelectorDialog if self._filter_selector_dialog is None: self._filter_selector_dialog = FilterSelectorDialog.FilterSelectorDialog(gtk.glade.XML(os.path.join(self._glade_prefix,'dialogs.glade'), "dialog_tag_favorites",'penguintv'),self) self._filter_selector_dialog.set_taglists(self._filters, self._favorite_filters) self._filter_selector_dialog.Show() def on_filter_changed(self, widget): model = widget.get_model() it = widget.get_active_iter() if it is None: return else: #if this is the edit tags menu item... 
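# --- isolated sketch of the two normalizations in the drop handler
# above: strip the NUL bytes that 'text/x-moz-url' payloads interleave
# (UTF-16 seen as raw bytes), and peel the 'feed:' pseudo-scheme that
# some blogs prepend.  The sample payload is an assumption ---
def _example_url_from_drop(data, is_moz_url):
	if is_moz_url:
		data = data[:data.find('\n')]   # moz-url: URL is the first line
	url = "".join([c for c in data if c != '\0'])
	if url.split(':')[0] == 'feed':     # 'feed:http://...' -> 'http://...'
		url = url[url.find(':') + 1:]
	return url
# _example_url_from_drop('f\0e\0e\0d\0:\0h\0t\0t\0p\0:\0/\0/\0a\0\n', True)
# -> 'http://a'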
if model[it][2] == 2: self.on_edit_favorite_tags() self._filter_selector_combo.set_active_iter(model.get_iter(self._active_filter_path)) return if model[it][1] == _('All Tags'): return names = [f[F_NAME] for f in self._filters] index = names.index(model[it][1]) if self._active_filter_index == index and not self.changing_layout: return self._active_filter_name = model[it][1] self._active_filter_index = index self._active_filter_path = model.get_path(it) if utils.HAS_SEARCH and index == FeedList.SEARCH: self._filter_tree[FeedList.SEARCH][3] = True else: self._filter_tree[FeedList.SEARCH][3] = False if utils.HAS_STATUS_ICON: self._filter_tree[FeedList.NOTIFY][3] = True else: self._filter_tree[FeedList.NOTIFY][3] = False self._activate_filter() def _find_path(self, index): model = self._filter_selector_combo.get_model() name = self._filters[index][F_NAME] self._active_filter_path = None #if utils.RUNNING_HILDON: # #not a tree, so some filters appear twice. Need to select first # #instance, which foreach does not do easily # for row in model: # if row[1] == name: # self._active_filter_path = row.path # break #else: def hunt_path(model, p, it): if model[it][1] == name and self._active_filter_path is None: self._active_filter_path = p model.foreach(hunt_path) def set_active_filter(self, index): model = self._filter_selector_combo.get_model() if utils.HAS_SEARCH and index == FeedList.SEARCH: self._filter_tree[FeedList.SEARCH][3] = True else: self._filter_tree[FeedList.SEARCH][3] = False if utils.HAS_STATUS_ICON: self._filter_tree[FeedList.NOTIFY][3] = True else: self._filter_tree[FeedList.NOTIFY][3] = False self._find_path(index) it = model.get_iter(self._active_filter_path) self._filter_selector_combo.set_active_iter(it) def _activate_filter(self): current_filter = self._filters[self._active_filter_index] if current_filter[F_TYPE] == ptvDB.T_SEARCH and self._state == S_MAJOR_DB_OPERATION: self.set_active_filter(FeedList.ALL) return self._app.change_filter(current_filter[F_NAME],current_filter[F_TYPE]) def on_import_opml_activate(self, event): if utils.RUNNING_HILDON: dialog = hildon.FileChooserDialog(self.window, action=gtk.FILE_CHOOSER_ACTION_OPEN) else: dialog = gtk.FileChooserDialog(_('Select OPML...'),None, action=gtk.FILE_CHOOSER_ACTION_OPEN, buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN,gtk.RESPONSE_OK)) dialog.set_default_response(gtk.RESPONSE_OK) filter = gtk.FileFilter() filter.set_name("OPML files") filter.add_pattern("*.opml") dialog.add_filter(filter) filter = gtk.FileFilter() filter.set_name("All files") filter.add_pattern("*") dialog.add_filter(filter) dialog.set_transient_for(self._app.main_window.get_parent()) response = dialog.run() if response == gtk.RESPONSE_OK: f = open(dialog.get_filename(), "r") self.display_status_message(_("Importing Feeds, please wait...")) self._app.import_subscriptions(f) elif response == gtk.RESPONSE_CANCEL: logging.info('Closed, no files selected') dialog.destroy() def on_app_key_press_event(self, widget, event): keyname = gtk.gdk.keyval_name(event.keyval) #if event.state & gtk.gdk.CONTROL_MASK: # if keyname == 'k': # self.search_entry.grab_focus() if event.state & gtk.gdk.MOD1_MASK: if keyname == '1': self._notebook.set_current_page(N_FEEDS) elif keyname == '2': if self._notebook.is_showing(N_PLAYER): self._notebook.set_current_page(N_PLAYER) elif keyname == '3': if self._notebook.is_showing(N_DOWNLOADS): self._notebook.set_current_page(N_DOWNLOADS) if utils.RUNNING_SUGAR: if keyname == 'KP_Left' or keyname == 'Left' or keyname == 
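# --- condensed form of the _find_path() trick above: TreeModel.foreach
# has no early exit, so the visitor records the first match and ignores
# the rest ---
def _example_find_first_path(model, column, wanted):
	hit = [None]                     # list cell: py2 closures cannot rebind

	def visit(model, path, it):
		if hit[0] is None and model[it][column] == wanted:
			hit[0] = path
	model.foreach(visit)
	return hit[0]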
'KP_4': self.feed_list_view.grab_focus() elif keyname == 'KP_Right' or keyname == 'Right' \ or keyname == 'KP_6': self.entry_view.grab_focus() elif utils.RUNNING_HILDON: #Move up Arrow key up GDK_Up #Move down Arrow key down GDK_Down #Move left Arrow key left GDK_Left #Move right Arrow key right GDK_Right #Select, Confirm Return GDK_Return #Cancel, Close Esc GDK_Escape #Open menu F4 GDK_F4 #Full screen F6 GDK_F6 #Increase / Zoom in / Volume up F7 GDK_F7 #Decrease / Zoom out / Volume down F8 GDK_F8 if keyname == 'KP_Left' or keyname == 'Left': self.feed_list_view.grab_focus() elif keyname == 'KP_Right' or keyname == 'Right': self.entry_view.grab_focus() elif keyname == 'F6': self.toggle_fullscreen() elif keyname == 'F7': if self._gstreamer_player is not None: self._gstreamer_player.vol_up() elif keyname == 'F8': if self._gstreamer_player is not None: self._gstreamer_player.vol_down() elif keyname == 'Escape': self.feed_tabs.set_current_page(0) else: #regular desktop version.. if keyname == 'F11': self.toggle_fullscreen() #the key press will also trigger the accelerator once the menu #comes back -- stop it widget.stop_emission("key-press-event") else: if self._use_internal_player and self._notebook.get_current_page() == N_PLAYER: #if gstreamer can do something with this key, stop further #emission if self._gstreamer_player.handle_key(keyname): widget.stop_emission("key-press-event") if keyname == "F" and event.state & gtk.gdk.CONTROL_MASK: self._gstreamer_player.ff() elif keyname == "B" and event.state & gtk.gdk.CONTROL_MASK: self._gstreamer_player.rew() def on_mark_entry_as_viewed_activate(self,event): entry = self.entry_list_view.get_selected() self._app.mark_entry_as_viewed(entry['entry_id'], entry['feed_id']) def on_mark_entry_as_unviewed_activate(self,event): entry = self.entry_list_view.get_selected()['entry_id'] self._app.mark_entry_as_unviewed(entry) def on_keep_entry_new_activate(self, event): entry = self.entry_list_view.get_selected()['entry_id'] self._app.activate_link("keep:%i" % (entry,)) def on_unkeep_entry_new_activate(self, event): entry = self.entry_list_view.get_selected()['entry_id'] self._app.activate_link("unkeep:%i" % (entry,)) def on_mark_feed_as_viewed_activate(self, button=None, event=None): feed = self.feed_list_view.get_selected() if feed: self._app.mark_feed_as_viewed(feed) def on_mark_all_viewed_activate(self, event): self._app.mark_all_viewed() def _on_notebook_realized(self, widget): self._notebook.show_page(N_FEEDS) if not utils.HAS_SEARCH: self.search_container.hide_all() #if utils.RUNNING_SUGAR: # self._filter_container.hide_all() if self._use_internal_player: self._gstreamer_player.load() if self._gstreamer_player.get_queue_count() > 0: self._notebook.show_page(N_PLAYER) self.emit('player-show') def _on_notebook_page_selected(self, widget, page, pagenum): if utils.RUNNING_HILDON: if pagenum == N_PLAYER: self.toolbar.hide() else: self.toolbar.show() #def _on_gst_player_realized(self, widget): # print "seek seek seek" # self._gstreamer_player.seek_to_saved_position() def on_play_entry_activate(self, event): entry = self.entry_list_view.get_selected()['entry_id'] self._app.play_entry(entry) def on_play_unviewed_activate(self, event): self._app.play_unviewed() def on_play_unviewed_clicked(self, event): self._app.play_unviewed() def _on_player_item_queued(self, player, filename, name, pos, userdata): self._notebook.show_page(N_PLAYER) self.emit('player-show') #if player.get_queue_count() == 1: # try: # self._notebook.set_current_page(N_PLAYER) # player.play() # 
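# --- skeleton of the key dispatch above: translate the hardware
# keyval to a symbolic name, test modifier masks, and stop emission so
# a matching accelerator does not fire a second time ---
def _example_on_key_press(widget, event, toggle_fullscreen):
	import gtk
	keyname = gtk.gdk.keyval_name(event.keyval)
	if event.state & gtk.gdk.MOD1_MASK and keyname == '1':
		pass                        # e.g. jump to the feeds tab
	elif keyname == 'F11':
		toggle_fullscreen()
		widget.stop_emission("key-press-event")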
except: # pass #fails while loading if utils.RUNNING_HILDON: self._player_label.set_markup(_('Player (%d)') % player.get_queue_count()) else: self._player_label.set_markup(_('Player (%d)') % player.get_queue_count()) #if self._state != S_MAJOR_DB_OPERATION: # tip = tooltips(self._player_label) # tip.display_notification("title", "text") def _on_player_items_removed(self, player): if player.get_queue_count() == 0: self._notebook.hide_page(N_PLAYER) self.emit('player-hide') player.stop() if utils.RUNNING_HILDON: self._player_label.set_markup(_('Player (%d)') % player.get_queue_count()) else: self._player_label.set_markup(_('Player (%d)') % player.get_queue_count()) def on_preferences_activate(self, event): self._app.window_preferences.show() def on_quit2_activate(self,event): self._app.do_quit() #make the program quit, dumbass ##DEBUG for exit_toolbutton #if utils.RUNNING_SUGAR: # gtk.main_quit() def on_refresh_activate(self, event): feed = self.feed_list_view.get_selected() self._app.refresh_feed(feed) def on_refresh_feeds_activate(self, event): self._app.poll_feeds(ptvDB.A_ALL_FEEDS) def on_refresh_feeds_with_errors_activate(self, event): self._app.poll_feeds(ptvDB.A_ERROR_FEEDS) def on_refresh_visible_feeds_activate(self, event): if self._active_filter_index > FeedList.SEARCH: feeds = self._app.db.get_feeds_for_tag(self._active_filter_name) self._app.do_poll_multiple(None, ptvDB.A_IGNORE_ETAG, feeds, message=_("Refreshing %s..." % self._active_filter_name)) elif utils.RUNNING_HILDON: self._app.do_poll_multiple(None, ptvDB.A_IGNORE_ETAG) def on_reindex_searches_activate(self, event): self.search_container.set_sensitive(False) self._app.set_state(penguintv.DEFAULT) self.search_entry.set_text(_("Please wait...")) self._app.db.doindex(self._app._done_populating) def _sensitize_search(self): self.search_entry.set_text("") self.search_container.set_sensitive(True) def on_remove_feed_activate(self, event, override=False): assert self._state != S_MAJOR_DB_OPERATION selected = self.feed_list_view.get_selected() if selected: self._notebook.set_current_page(N_FEEDS) if not override: dialog = gtk.Dialog(title=_("Really Remove Feed?"), parent=None, flags=gtk.DIALOG_MODAL, buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT, gtk.STOCK_REMOVE, gtk.RESPONSE_ACCEPT)) label = gtk.Label(_("Are you sure you want to remove this feed, all its entries, and all its media? 
\nThis operation cannot be undone.")) dialog.vbox.pack_start(label, True, True, 0) label.show() dialog.set_transient_for(self._app.main_window.get_parent()) response = dialog.run() dialog.hide() del dialog if response != gtk.RESPONSE_ACCEPT: return self._app.remove_feed(selected) def on_resume_all_activate(self, event): self._app.resume_resumable() def on_save_search_clicked(self, event): query = self.search_entry.get_text() if query=="": return self._window_add_search.show() self._window_add_search.set_query(query) def on_search_clear_clicked(self, event): self._app.set_state(penguintv.DEFAULT) def on_saved_searches_activate(self, event): window_edit_saved_searches = EditSearchesDialog.EditSearchesDialog(os.path.join(self._glade_prefix,'extra_dialogs.glade'),self._app) window_edit_saved_searches.show() del window_edit_saved_searches def on_search_entry_activate(self, event): self._app.manual_search(self.search_entry.get_text()) def on_search_entry_changed(self, widget): pass #self.search_entry.get_completion().complete() #if self.search_container.get_property("sensitive"): # self._app.threaded_search(self.search_entry.get_text()) def on_show_downloads_activate(self, event): self._app.show_downloads() def on_stop_downloads_clicked(self, widget): self._app.stop_downloads() #def on_stop_downloads_toggled(self, widget): # print "toggled" # self._app.stop_downloads_toggled(widget.get_active()) def on_synchronize_activate(self, event): self._sync_dialog.Show() def on_standard_layout_activate(self, event): self._app.change_layout('standard') def on_horizontal_layout_activate(self, event): self._app.change_layout('widescreen') def on_vertical_layout_activate(self,event): self._app.change_layout('vertical') def on_planet_layout_activate(self, event): self._app.change_layout('planet') def on_fancy_feed_display_activate(self, menuitem): self.feed_list_view.set_fancy(menuitem.get_active()) self._app.db.set_setting(ptvDB.BOOL, '/apps/penguintv/fancy_feedlist', menuitem.get_active()) def on_show_notifications_activate(self, menuitem): self._app.db.set_setting(ptvDB.BOOL, '/apps/penguintv/show_notifications', menuitem.get_active()) def activate_layout(self, layout): """gets called by app when it's ready""" self.changing_layout = True self.layout=layout self._app.save_settings() self._app.write_feed_cache() self._layout_dock.remove(self._layout_container) self._layout_dock.add(self.load_layout()) self.entry_view.post_show_init() if self.layout.endswith("planet"): self._menu_widgettree.get_widget('entry_menu_item').hide() else: self._menu_widgettree.get_widget('entry_menu_item').show() self._notebook.show_only(N_FEEDS) if not utils.HAS_SEARCH: self.search_container.hide_all() if utils.RUNNING_HILDON: self._layout_components.get_widget('filter_label').hide() self._filter_unread_checkbox.hide() #if not utils.USE_TAGGING: # self._filter_container.hide_all() if self._use_internal_player: if self._gstreamer_player.get_queue_count() > 0: self._notebook.show_page(N_PLAYER) self.emit('player-show') #can't reset changing_layout because app hasn't updated pointers yet def is_changing_layout(self): return self.changing_layout def display_status_message(self, m, update_category=U_STANDARD): """displays a status message on the main status bar. 
	def display_status_message(self, m, update_category=U_STANDARD):
		"""displays a status message on the main status bar.  If this is a
		polling update or download update, we don't overwrite what's there."""
		if self._status_view is None:
			return
		current_text = self._status_view.get_status().get_text()
		if current_text == "":
			self._status_owner = update_category
			self._status_view.set_status(m)
			if utils.HAS_STATUS_ICON:
				self._status_icon.set_tooltip(m)
		else:
			if update_category >= self._status_owner:
				self._status_view.set_status(m)
				if utils.HAS_STATUS_ICON:
					self._status_icon.set_tooltip(m)
				if m == "":
					self._status_owner = U_NOBODY
				else:
					self._status_owner = update_category
		#if update_category==U_STANDARD: #only overwrite if this is not a poll or download
		#	self._status_owner = update_category
		#	self._status_view.set_status(m)
		#elif update_category == U_POLL and self._status_owner != U_STANDARD:
		#	self._status_owner = update_category
		#	self._status_view.set_status(m)
		#elif update_category == U_DOWNLOAD and self._status_owner == U_DOWNLOAD:
		#	self._status_view.set_status(m)
		return False #in case of timeouts

	def update_progress_bar(self, p, update_category=U_STANDARD):
		"""Update the progress bar.  If both downloading and polling, polling wins"""
		if p == -1:
			self._bar_owner = U_NOBODY
			self._status_view.set_progress_percentage(0)
		else:
			if update_category >= self._bar_owner:
				self._bar_owner = update_category
				self._status_view.set_progress_percentage(p)

	def _unset_state(self):
		"""gets app ready to display new state by unloading current state"""
		#bring state back to default
		if self._state == S_MANUAL_SEARCH:
			self.search_entry.set_text("")
		if self._state == S_MAJOR_DB_OPERATION:
			self._widgetTree.get_widget("feed_add_button").set_sensitive(True)
			self._widgetTree.get_widget("feed_remove").set_sensitive(True)
			if not utils.RUNNING_SUGAR: #these are menu items
				self._menu_widgettree.get_widget("add_feed").set_sensitive(True)
				self._menu_widgettree.get_widget("remove_feed").set_sensitive(True)
				self._menu_widgettree.get_widget("properties").set_sensitive(True)
			#elif not utils.USE_TAGGING:
			#	self._menu_widgettree.get_widget("add_feed_filter").set_sensitive(True)
		self.display_status_message("")
		self.update_progress_bar(-1, U_LOADING)

	def __state_changed_cb(self, app, new_state, data=None):
		d = {penguintv.DEFAULT: S_DEFAULT,
			 penguintv.MANUAL_SEARCH: S_MANUAL_SEARCH,
			 penguintv.TAG_SEARCH: S_TAG_SEARCH,
			 #penguintv.ACTIVE_DOWNLOADS: S_DEFAULT,
			 penguintv.MAJOR_DB_OPERATION: S_MAJOR_DB_OPERATION}
		new_state = d[new_state]
		if self._state == new_state:
			return
		self._unset_state()
		if new_state == S_MANUAL_SEARCH:
			if self.get_active_filter()[1] != FeedList.SEARCH:
				self.set_active_filter(FeedList.SEARCH)
		if new_state == S_TAG_SEARCH:
			self.search_entry.set_text("")
		if new_state == S_MAJOR_DB_OPERATION:
			self._widgetTree.get_widget("feed_add_button").set_sensitive(False)
			self._widgetTree.get_widget("feed_remove").set_sensitive(False)
			if not utils.RUNNING_SUGAR: #these are menu items
				self._menu_widgettree.get_widget("add_feed").set_sensitive(False)
				self._menu_widgettree.get_widget("remove_feed").set_sensitive(False)
				self._menu_widgettree.get_widget("properties").set_sensitive(False)
			#elif not utils.USE_TAGGING:
			#	self._menu_widgettree.get_widget("add_feed_filter").set_sensitive(False)
		self._state = new_state

	def update_filters(self):
		"""update the filter combo box with the current list of filters"""
		#get name of current filter, if a tag
		current_filter = self.get_active_filter()[0]
		self._filters = []
		self._favorite_filters = []
		self._filter_tree.clear()
		completion_model = self.search_entry.get_completion().get_model()
		completion_model.clear()
		i = 0 #we set i here so that searches and regular tags have incrementing ids
		builtin = _("All Feeds")
		text = builtin + " (" + str(len(self._app.db.get_feedlist())) + ")"
		self._filters.append([0, builtin, text, ptvDB.T_BUILTIN])
		self._filter_tree.append(None, [text, builtin, 0, True])
		i += 1
		builtin = _("Downloaded Media")
		self._filters.append([0, builtin, builtin, ptvDB.T_BUILTIN])
		self._filter_tree.append(None, [builtin, builtin, 0, True])
		i += 1
		builtin = _("Notifying Feeds")
		text = builtin + " (" + str(len(self._app.db.get_feeds_for_flag(ptvDB.FF_NOTIFYUPDATES))) + ")"
		self._filters.append([0, builtin, text, ptvDB.T_BUILTIN])
		self._filter_tree.append(None, [text, builtin, 0, utils.HAS_STATUS_ICON])
		i += 1
		builtin = _("Search Results")
		self._filters.append([0, builtin, builtin, ptvDB.T_BUILTIN])
		self._search_iter = self._filter_tree.append(None, [builtin, builtin, 0, False])
		i += 1
		has_search = False
		if utils.HAS_SEARCH:
			tags = self._app.db.get_all_tags(ptvDB.T_SEARCH)
			if tags:
				has_search = True
				for tag, favorite in tags:
					i += 1
					self._filters.append([favorite, tag, tag, ptvDB.T_SEARCH])
					completion_model.append([tag, _('tag: %s') % (tag,), i - 1])
					if favorite > 0:
						self._favorite_filters.append([favorite, tag, tag, i])
		tags = self._app.db.get_all_tags(ptvDB.T_TAG)
		if tags:
			self._filter_tree.append(None, ["", "", 1, True])
			for tag, favorite in tags:
				i += 1
				self._filters.append([favorite, tag,
					tag + " (" + str(self._app.db.get_count_for_tag(tag)) + ")", ptvDB.T_TAG])
				completion_model.append([tag, _('tag: %s') % (tag,), i - 1])
				if favorite > 0:
					self._favorite_filters.append([favorite, tag,
						tag + " (" + str(self._app.db.get_count_for_tag(tag)) + ")", i])
		self._favorite_filters.sort()
		self._favorite_filters = [f[1:] for f in self._favorite_filters]
		for fav in self._favorite_filters:
			self._filter_tree.append(None, [fav[1], fav[0], 0, True])
		if tags:
			if utils.RUNNING_HILDON:
				all_tags_submenu = None
				self._filter_tree.append(None, ["", "", 1, True])
			else:
				all_tags_submenu = self._filter_tree.append(None, [_('All Tags'), _('All Tags'), 0, True])
			if has_search:
				for f in self._filters:
					if f[F_TYPE] == ptvDB.T_SEARCH:
						self._filter_tree.append(all_tags_submenu, [f[F_DISPLAY], f[F_NAME], 0, True])
				self._filter_tree.append(all_tags_submenu, ["", "", 1, True])
			for f in self._filters:
				if f[F_TYPE] == ptvDB.T_TAG:
					self._filter_tree.append(all_tags_submenu, [f[F_DISPLAY], f[F_NAME], 0, True])
		if not utils.RUNNING_HILDON:
			self._filter_tree.append(None, [_('Edit Favorite Tags...'), _('Edit Favorite Tags...'), 2, True])
		#get index for our previously selected tag
		index = self.get_filter_index(current_filter)
		if not self.changing_layout:
			if index is not None:
				self.set_active_filter(index)
			else:
				self.set_active_filter(FeedList.ALL)

	def set_tag_favorites(self, tag_list):
		old_order = [f[0] for f in self._favorite_filters]
		i = 0
		for t in tag_list[:len(old_order)]:
			i += 1
			if t != old_order[i - 1]:
				self._app.db.set_tag_favorite(t, i)
		if len(old_order) > 0:
			i = len(old_order) - 1
		else:
			i = 0
		for t in tag_list[len(old_order):]:
			i += 1
			self._app.db.set_tag_favorite(t, i)
		old = set(old_order)
		new = set(tag_list)
		removed = list(old.difference(new))
		for t in removed:
			self._app.db.set_tag_favorite(t, 0)
		self.update_filters()

	def _on_completion_match_selected(self, completion, model, iter, column):
		self.search_entry.set_text("")
		self.set_active_filter(model[iter][column])

	def finish(self):
		if self._use_internal_player:
			self._gstreamer_player.finish()
		self.desensitize()

	def get_filter_name(self, filt):
		return self._filters[filt][F_NAME]
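	# Illustrative sketch, not from the original source: how the
	# update_category argument to display_status_message and update_progress_bar
	# above arbitrates ownership of the status bar.  A message only replaces
	# the current one when its category is >= the current owner's, and an
	# empty message releases ownership back to U_NOBODY:
	#
	#	self.display_status_message(_("Loading feeds..."), U_LOADING)
	#	self.display_status_message(msg, U_DOWNLOAD)  # shown only if U_DOWNLOAD >= owner
	#	self.display_status_message("", U_DOWNLOAD)   # owner becomes U_NOBODY again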
	def get_filter_index(self, string):
		names = [m[F_NAME] for m in self._filters]
		try:
			index = names.index(string)
			#assuming the filter name itself was meant here; the published
			#code tested "names not in FeedList.BUILTIN_TAGS", which is
			#always true for a list of strings
			if string not in FeedList.BUILTIN_TAGS:
				return index
			return None
		except:
			return None

	def get_active_filter(self):
		return (self._active_filter_name, self._active_filter_index)

	def rename_filter(self, old_name, new_name):
		names = [m[F_NAME] for m in self._filters]
		index = names.index(old_name)
		self._filters[index][F_NAME] = new_name
		self._filters[index][F_DISPLAY] = new_name

	def select_feed(self, feed_id):
		#if we have a tag, pick the first one (really used just when adding feeds)
		tags = self._app.db.get_tags_for_feed(feed_id)
		if len(tags) > 0:
			if not self._active_filter_name in tags:
				self.set_active_filter(FeedList.ALL)
		else:
			self.set_active_filter(FeedList.ALL)
		self.feed_list_view.set_selected(feed_id)
		self.feed_list_view.resize_columns()

	def update_disk_usage(self, size):
		if self._disk_usage_widget is None:
			return
		self._disk_usage_widget.set_markup(utils.format_size(size))

	def update_download_progress(self):
		progresses = self._mm.get_download_list(Downloader.DOWNLOADING)
		queued = self._mm.get_download_list(Downloader.QUEUED)
		paused = self._mm.get_download_list(Downloader.PAUSED)
		#print len(progresses)
		if len(progresses) + len(queued) == 0:
			self.display_status_message("")
			self.update_progress_bar(-1, U_DOWNLOAD)
			self._download_view.update_downloads()
			total = len(progresses) + len(queued) + len(paused)
			self._update_notebook_tabs(total)
			return
		total_size = 0
		downloaded = 0
		for d in progresses + queued:
			if d.total_size <= 0:
				total_size += 1
			else:
				total_size += d.total_size
				downloaded += (d.progress / 100.0) * d.total_size
		if total_size == 0:
			total_size = 1
		info = {  #renamed from "dict", which shadowed the builtin
			'percent': downloaded * 100.0 / total_size,
			'files': len(progresses) + len(queued),
			'total': total_size > 1 and "(" + utils.format_size(total_size) + ")" or '', #ternary operator simulation
			's': len(progresses) > 1 and 's' or '',
			'queued': len(queued)}
		if info['queued'] > 0:
			message = _("Downloaded %(percent)d%% of %(files)d file%(s)s, %(queued)d queued %(total)s") % info
		else:
			message = _("Downloaded %(percent)d%% of %(files)d file%(s)s %(total)s") % info
		self.display_status_message(message, U_DOWNLOAD)
		self.update_progress_bar(info['percent'] / 100.0, U_DOWNLOAD)
		self._download_view.update_downloads()
		self._update_notebook_tabs(len(progresses) + len(queued) + len(paused))

	def _update_notebook_tabs(self, number):
		#logging.debug("updating notebook tabs (%i)" % number)
		if number == 0:
			self._notebook.hide_page(N_DOWNLOADS)
		else:
			self._downloads_label.set_markup(_('Downloads (%d)') % number)
			self._notebook.show_page(N_DOWNLOADS)

	def desensitize(self):
		if self.app_window:
			self.app_window.set_sensitive(False)
		else:
			self._layout_container.set_sensitive(False)
		while gtk.events_pending(): #make sure the sensitivity change goes through
			gtk.main_iteration()

	def sensitize(self):
		if self.app_window:
			self.app_window.set_sensitive(True)
		else:
			self._layout_container.set_sensitive(True)
		while gtk.events_pending(): #make sure the sensitivity change goes through
			gtk.main_iteration()


class NotebookManager(gtk.Notebook):
	"""manages showing and hiding of tabs.  Also, hides the whole tab bar if
	only one tab is open, and selects a different tab if the one we are
	closing is selected"""
	def __init__(self):
		gtk.Notebook.__init__(self)
		# pages_showing refers to tabs that would be visible.
		# It is overridden by keep_hidden
		self._pages_showing = {}
		self._default_page = 0
		self._keep_hidden = False

	def append_page(self, widget, label):
		self._pages_showing[len(self._pages_showing)] = False
		gtk.Notebook.append_page(self, widget, label)

	def show_page(self, n):
		if not self._pages_showing.has_key(n):
			return
		if self._pages_showing[n] == True:
			return
		self._pages_showing[n] = True
		self.get_nth_page(n).show_all()
		showing_count = 0
		for key in self._pages_showing.keys():
			if self._pages_showing[key]:
				showing_count += 1
		if showing_count > 1 and not self._keep_hidden:
			self.set_show_tabs(True)

	def hide_page(self, n):
		if not self._pages_showing.has_key(n):
			return
		if self._pages_showing[n] == False:
			return
		self._pages_showing[n] = False
		self.get_nth_page(n).hide()
		showing_count = 0
		for key in self._pages_showing.keys():
			if self._pages_showing[key]:
				showing_count += 1
		if showing_count == 1:
			for key in self._pages_showing.keys():
				if self._pages_showing[key]:
					self.set_current_page(key)
			self.set_show_tabs(False)
		if self.get_current_page() == n:
			self.set_current_page(self._default_page)

	def show_only(self, n):
		if not self._pages_showing.has_key(n):
			return
		self._default_page = n
		for i in range(0, self.get_n_pages()):
			self._pages_showing[i] = i == n
			if i == n:
				self.get_nth_page(i).show_all()
			else:
				self.get_nth_page(i).hide_all()
		self.set_current_page(n)
		self.set_show_tabs(False)

	def set_keep_hidden(self, hide):
		"""For fullscreen mode, we never want to show tabs"""
		if hide:
			self.set_show_tabs(False)
			self._keep_hidden = True
		else:
			self._keep_hidden = False
			showing_count = 0
			for key in self._pages_showing.keys():
				if self._pages_showing[key]:
					showing_count += 1
			if showing_count > 1:
				self.set_show_tabs(True)

	def is_showing(self, n):
		try:
			return self._pages_showing[n]
		except:
			return False


class ShouldntHappenError(Exception):
	def __init__(self, error):
		self.error = error
	def __str__(self):
		return self.error
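# Illustrative sketch, not part of the original source: driving
# NotebookManager's page bookkeeping.  The gtk.Label widgets are placeholders
# for real pages.
def _notebook_manager_example():
	nb = NotebookManager()
	nb.append_page(gtk.Label("feeds"), gtk.Label("Feeds"))
	nb.append_page(gtk.Label("downloads"), gtk.Label("Downloads"))
	nb.show_page(0)  # one page showing: the tab bar stays hidden
	nb.show_page(1)  # two pages showing: the tab bar appears
	nb.hide_page(1)  # back to one page: page 0 is selected, tab bar hides
	return nb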
PenguinTV-4.2.0/penguintv/trayicon/0000755000000000000000000000000011450514774014141 5ustar
PenguinTV-4.2.0/penguintv/trayicon/__init__.py0000644000000000000000000000000010646750246016242 0ustar
PenguinTV-4.2.0/penguintv/trayicon/TrayIcon.py0000755000000000000000000001500111303005474016230 0ustar #!/usr/bin/env python

import os, sys
import traceback
#debug so utils imports
#sys.path.append("/home/owen/penguintv/penguintv")
import logging
import time

import pygtk
pygtk.require("2.0")
import gtk
import gobject

import utils

HAS_PYNOTIFY = False
if utils.RUNNING_HILDON:
	import hildon
elif utils.get_pynotify_ok():
	import pynotify
	HAS_PYNOTIFY = True
else:
	import SonataNotification

MAX_HEIGHT = 64
MAX_WIDTH = 64
MIN_SIZE = 16

class StatusTrayIcon(gtk.StatusIcon):
	__gsignals__ = {
		'notification-clicked': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE,
			([gobject.TYPE_PYOBJECT])),
	}

	def __init__(self, icon, menu=None, show_always=True, parent=None):
		#Init StatusIcon
		gtk.StatusIcon.__init__(self)
		if HAS_PYNOTIFY:
			#Initialize Notification
			pynotify.init("PenguinTVNotification")
		self.set_from_file(icon)
		self.set_tooltip('')
		if menu is not None:
			self.menu = menu
			self.connect('popup-menu', self.__popup_menu_cb)
		self._notifications = []
		self._updater_id = -1
		self._notification_displaying = False
		self._show_always = show_always
		self._parent = parent
		self.set_visible(self._show_always)

	def set_parent(self, p):
		self._parent = p

	def set_show_always(self, b):
		self._show_always = b
		if self._show_always:
			self.set_visible(True)
		elif not self._notification_displaying:
			self.set_visible(False)

	def display_notification(self, title, message, icon=None, userdata=None):
		self.set_visible(True)
		self._notifications.append([title, message, icon, userdata])
		if self._updater_id == -1:
			self._updater_id = gobject.timeout_add(1000, self._display_notification_handler)

	def clear_notifications(self):
		self._notifications = []

	def _display_notification_handler(self):
		if len(self._notifications) == 0:
			self._updater_id = -1
			return False
		if self._notification_displaying:
			return True
		title, message, icon, userdata = self._notifications.pop(0)
		icon_pixbuf = self._scale_pixbuf(icon)
		if utils.RUNNING_HILDON:
			self._display_hildonnotification(title, message, icon, userdata)
		elif HAS_PYNOTIFY:
			self._display_pynotification(title, message, icon_pixbuf, userdata)
		else:
			self._display_sonatafication(title, message, icon_pixbuf, userdata)
		return True

	def _display_hildonnotification(self, title, message, icon=None, userdata=None):
		if self._parent is None:
			#the published code fell through after this log line and would
			#have passed a None parent to the banner call; bail out instead
			logging.info("not showing notification, no parent widget")
			return
		self._notification_displaying = True
		logging.debug("showing notification: %s %s" % (title, message))
		try:
			b = hildon.hildon_banner_show_information_with_markup(
				self._parent, icon, "%s\n%s" % (title, message))
		except TypeError:
			#banner bug not fixed yet
			b = hildon.hildon_banner_show_information_with_markup(
				self._parent, "NULL", "%s\n%s" % (title, message))
			if icon is not None:
				b.set_icon_from_file(icon)
		b.set_timeout(3000)
		def done_showing():
			self._notification_displaying = False
			return False
		gobject.timeout_add(5000, done_showing)

	def _display_pynotification(self, title, message, icon=None, userdata=None):
		#don't need this, pynotifications can stack up
		#self._notification_displaying = True
		#logging.debug("displaying pynotification: %s %s" % (title, message))
		if icon is not None:
			notification = pynotify.Notification(title, message, None)
			notification.set_icon_from_pixbuf(icon)
		else:
			notification = pynotify.Notification(title, message, "info")
		notification.set_timeout(3000)
		notification.set_data('userdata', userdata)
		#setting a default action used to work, but now it causes the notification to become
		#a boring OK/Cancel dialog box, which is weird
		#notification.add_action('default', 'Default Action', self.__pynotification_click_cb)
		#notification.connect('closed', self.__notification_closed_cb)
		#screen, rect, orient = self.get_geometry()
		#notification.set_hint("x", rect.x + (rect.width / 2))
		#notification.set_hint("y", rect.y + (rect.height / 2))
		notification.show()

	def _display_sonatafication(self, title, message, icon=None, userdata=None):
		self._notification_displaying = True
		notification = SonataNotification.TrayIconTips(self)
		notification.set_timeout(5000)
		notification.display_notification(title, message, icon)
		notification.connect('hide', self.__notification_closed_cb)
		notification.connect('clicked', self.__sonatafication_click_cb, userdata)
		notification.connect('closed', self.__notification_closed_cb)

	def _scale_pixbuf(self, filename):
		try:
			p = gtk.gdk.pixbuf_new_from_file(filename)
		except:
			p = gtk.gdk.Pixbuf(gtk.gdk.COLORSPACE_RGB, True, 8, MIN_SIZE, MIN_SIZE)
			p.fill(0xffffff00)
			return p
		height = p.get_height()
		width = p.get_width()
		if height > MAX_HEIGHT:
			height = MAX_HEIGHT
			width = p.get_width() * height / p.get_height()
		if width > MAX_WIDTH:
			width = MAX_WIDTH
			height = p.get_height() * width / p.get_width()
		if height != p.get_height() or width != p.get_width():
			p = gtk.gdk.pixbuf_new_from_file_at_size(filename, width, height)
		return p

	###Callbacks
	def __popup_menu_cb(self, status, button, time):
		self.menu.popup(None, None, None, button, time)

	def __sonatafication_click_cb(self, notification, action, userdata):
		self.emit('notification-clicked', userdata)
		notification.close()

	def __pynotification_click_cb(self, notification, action):
		userdata = notification.get_data('userdata')
		self.emit('notification-clicked', userdata)
		notification.close()

	def __notification_closed_cb(self, widget):
		self._notification_displaying = False
		if not self._show_always:
			self.set_visible(False)

	def __menu_cb(self, data):
		action = data.get_accel_path().split("/")[-1]
		self.emit('menu-clicked', action)

def _test_tray_icon(icon):
	icon.display_notification('title', 'message', '/home/owen/src/penguintv/share/penguintvicon.png')
	icon.set_tooltip('yo yo yo!')
	icon.display_notification('title2', 'message2', '/home/owen/src/penguintv/share/penguintvicon.png')
	icon.display_notification('title3', 'message3', '/home/owen/src/penguintv/share/penguintvicon.png')
	icon.display_notification('title4', 'message4', '/home/owen/src/penguintv/share/penguintvicon.png')
	return False

if __name__ == '__main__':
	# Here starts the dynamic part of the program
	h = None
	if utils.RUNNING_HILDON:
		h = hildon.Window()
		l = gtk.Label("hello world")
		h.add(l)
		h.show_all()
	trayicon = StatusTrayIcon('/home/owen/src/penguintv/share/penguintvicon.png', parent=h)
	gobject.timeout_add(2000, _test_tray_icon, trayicon)
	gtk.main()
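# Illustrative sketch, not part of the original source: notifications passed
# to display_notification are queued and drained one per second by
# _display_notification_handler, so rapid calls never overlap on screen.
# The icon path is a placeholder.
def _notification_queue_example(icon_path='/usr/share/penguintv/penguintvicon.png'):
	tray = StatusTrayIcon(icon_path)
	for n in range(3):
		# the first call arms the 1000ms gobject timeout that pops one
		# queued notification per tick
		tray.display_notification('title %d' % n, 'message %d' % n, icon_path)
	return tray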
PenguinTV-4.2.0/penguintv/trayicon/SonataNotification.py0000644000000000000000000001273711300532355020306 0ustar # This code adapted from Sonata (http://sonata.berlios.de/), thanks to Scott Horowitz

import gtk
import gobject

class TrayIconTips(gtk.Window):
	"""Custom tooltips derived from gtk.Window() that allow for markup text
	and multiple widgets, e.g. a progress bar. ;)"""
	MARGIN = 4

	__gsignals__ = {
		'clicked': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_INT])),
		'closed': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([]))
	}

	def __init__(self, widget=None):
		gtk.Window.__init__(self, gtk.WINDOW_POPUP)
		if widget is not None:
			self._pos_widget = widget
		# from gtktooltips.c:gtk_tooltips_force_window
		self.set_app_paintable(True)
		self.set_resizable(False)
		self.set_name("gtk-tooltips")
		self.connect('expose-event', self._on__expose_event)
		self.connect('event-after', self._motion_cb)
		self.connect('button_press_event', self.__button_press_cb)
		self.set_events(gtk.gdk.EXPOSURE_MASK |
						gtk.gdk.LEAVE_NOTIFY_MASK |
						gtk.gdk.BUTTON_PRESS_MASK)
		self._timeout = 5000
		self._hide_ok = True
		self.use_notifications_location = False
		self.notifications_location = 0

		#basic notification widget
		hbox = gtk.HBox()
		hbox.set_spacing(5)
		self._image = gtk.Image()
		hbox.pack_start(self._image, False)
		vbox = gtk.VBox()
		self._title = gtk.Label()
		self._title.set_justify(gtk.JUSTIFY_LEFT)
		self._title.set_alignment(0, .5)
		vbox.pack_start(self._title, False)
		self._text = gtk.Label()
		self._text.set_justify(gtk.JUSTIFY_LEFT)
		self._text.set_alignment(0, .5)
		vbox.pack_start(self._text, True)
		hbox.pack_start(vbox, True, True)
		vbox = gtk.VBox()
		img = gtk.Image()
		img.set_from_stock("gtk-close", gtk.ICON_SIZE_MENU)
		button = gtk.Button()
		button.set_image(img)
		button.connect('clicked', self.__close_clicked_cb)
		vbox.pack_start(button, False, False)
		label = gtk.Label("")
		vbox.pack_start(label, True, True)
		hbox.pack_start(vbox, False, False)
		align = gtk.Alignment(0, 0, 1, 1)
		align.set_padding(10, 10, 10, 10)
		align.add(hbox)
		self.add(align)
		align.show_all()
		self.set_size_request(500, 96)

	def _calculate_pos(self, widget):
		try:
			icon_screen, icon_rect, icon_orient = widget.get_geometry()
			x = icon_rect[0]
			y = icon_rect[1]
			width = icon_rect[2]
			height = icon_rect[3]
		except:
			x, y, width, height = widget.get_allocation()
			x2, y2 = widget.translate_coordinates(widget.get_parent(), 0, 0)
			x += x2
			y += y2 + height
		w, h = self.size_request()
		screen = self.get_screen()
		pointer_screen, px, py, _ = screen.get_display().get_pointer()
		if pointer_screen != screen:
			px = x
			py = y
		try:
			# Use the monitor that the systemtray icon is on
			monitor_num = screen.get_monitor_at_point(x, y)
		except:
			# No systemtray icon, use the monitor that the pointer is on
			monitor_num = screen.get_monitor_at_point(px, py)
		monitor = screen.get_monitor_geometry(monitor_num)
		try:
			# If the tooltip goes off the screen horizontally, realign it so that
			# it all displays.
			if (x + w) > monitor.x + monitor.width:
				x = monitor.x + monitor.width - w
			# If the tooltip goes off the screen vertically (i.e. the system tray
			# icon is on the bottom of the screen), realign the icon so that it
			# shows above the icon.
			if ((y + h + height + self.MARGIN) > monitor.y + monitor.height):
				y = y - h - self.MARGIN
			else:
				y = y + height + self.MARGIN
		except:
			pass
		if self.use_notifications_location == False:
			try:
				return x, y
			except:
				#Fallback to top-left:
				return monitor.x, monitor.y
		elif self.notifications_location == 0:
			try:
				return x, y
			except:
				#Fallback to top-left:
				return monitor.x, monitor.y
		elif self.notifications_location == 1:
			return monitor.x, monitor.y
		elif self.notifications_location == 2:
			return monitor.x + monitor.width - w, monitor.y
		elif self.notifications_location == 3:
			return monitor.x, monitor.y + monitor.height - h
		elif self.notifications_location == 4:
			return monitor.x + monitor.width - w, monitor.y + monitor.height - h

	def _motion_cb(self, widget, event):
		if event.type == gtk.gdk.LEAVE_NOTIFY:
			self._hide_ok = True
		if event.type == gtk.gdk.ENTER_NOTIFY:
			self._hide_ok = False

	def __button_press_cb(self, widget, event):
		self.emit('clicked', 1)

	def __close_clicked_cb(self, widget):
		self.hide()
		self.emit('closed')

	# from gtktooltips.c:gtk_tooltips_paint_window
	def _on__expose_event(self, window, event):
		w, h = window.size_request()
		window.style.paint_flat_box(window.window, gtk.STATE_NORMAL, gtk.SHADOW_OUT,
			None, window, "tooltip", 0, 0, w, h)
		return False

	def _real_display(self, widget):
		x, y = self._calculate_pos(widget)
		self.move(x, y)
		self.show()

	# Public API
	def close(self):
		gtk.Window.hide(self)
		self.notif_handler = None

	def hide(self):
		if self._hide_ok:
			gtk.Window.hide(self)
			self.notif_handler = None
			return False
		else:
			return True

	def set_timeout(self, timeout):
		self._timeout = timeout

	def display_notification(self, title, text, icon=None):
		if icon is not None:
			self._image.set_from_pixbuf(icon)
		else:
			self._image.set_from_stock('gtk-dialog-info', gtk.ICON_SIZE_DIALOG)
		self._image.show()
		#the markup tags around the title were stripped in this copy of the
		#source; bold is assumed
		self._title.set_markup('<b>' + title + '</b>')
		self._text.set_markup(text)
		self._real_display(self._pos_widget)
		gobject.timeout_add(self._timeout, self.hide)
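# Illustrative sketch, not part of the original source: choosing the popup
# corner used by _calculate_pos.  Location 0 follows the widget/tray icon;
# 1-4 pin the popup to a monitor corner (top-left, top-right, bottom-left,
# bottom-right).  The widget argument is assumed to be a tray icon or any
# realized widget the tip can position against.
def _notification_location_example(widget):
	tip = TrayIconTips(widget)
	tip.use_notifications_location = True
	tip.notifications_location = 4  # bottom-right corner of the monitor
	tip.set_timeout(3000)
	tip.display_notification('title', 'text')
	return tip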
PenguinTV-4.2.0/penguintv/FeedList.py0000644000000000000000000013117411375016507014366 0ustar import sys, os, re
import glob
import logging
import random
import traceback
import time

import gtk
import gobject
import pango

import penguintv
import ptvDB
import utils
import IconManager
import MainWindow

if utils.RUNNING_HILDON:
	import hildon

NONE = -1 #unused, needs a value
ALL = 0
DOWNLOADED = 1
NOTIFY = 2
SEARCH = 3

BUILTIN_TAGS = [_("All Feeds"), _("Downloaded Media"), _("Notifying Feeds"), _("Search Results")]

#column indexes in the feed list model
TITLE = 0
MARKUPTITLE = 1
FEEDID = 2
STOCKID = 3
READINFO = 4
PIXBUF = 5
DETAILS_LOADED = 6
UNREAD = 7
TOTAL = 8
FLAG = 9
VISIBLE = 10
POLLFAIL = 11
FIRSTENTRYTITLE = 12
NOTVISIBLE = 13 #used as a populate_feeds subset, not a column

#STATES
S_DEFAULT = 0
S_SEARCH = 1
S_MAJOR_DB_OPERATION = 2

MAX_WIDTH = 48
MAX_HEIGHT = 48
MIN_SIZE = 24

class FeedList(gobject.GObject):
	__gsignals__ = {
		'link-activated': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_PYOBJECT])),
		'feed-selected': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_INT])),
		'feed-clicked': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([])),
		'search-feed-selected': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_INT])),
		'no-feed-selected': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, []),
		'state-change': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_INT]))
	}

	def __init__(self, widget_tree, app, fancy=False):
		gobject.GObject.__init__(self)
		self._app = app
		self._icon_manager = IconManager.IconManager(self._app.db.home)
		self._scrolled_window = widget_tree.get_widget('feed_scrolled_window')
		if utils.RUNNING_HILDON:
			hildon.hildon_helper_set_thumb_scrollbar(self._scrolled_window, True)
		self._va = self._scrolled_window.get_vadjustment()
		self._widget = widget_tree.get_widget('feedlistview')
		self._entry_list_widget = widget_tree.get_widget('entrylistview')
		self._feedlist = gtk.ListStore(str, #title
									   str, #markup title
									   int, #feed_id
									   str, #stockid
									   str, #readinfo
									   gtk.gdk.Pixbuf, #pixbuf
									   bool, #details loaded
									   int, #unread
									   int, #total
									   int, #flag
									   bool, #visible
									   bool, #pollfail
									   str) #first entry title
		self._last_selected = None
		self._last_feed = None
		self.filter_setting = ALL
		self.filter_name = _("All Feeds")
		self._selecting_misfiltered = False
		self._filter_unread = False
		self._cancel_load = [False, False] #loading feeds, loading details
		self._loading_details = 0
		self._state = S_DEFAULT
		self._fancy = fancy
		self.__widget_width = 0
		self.__resetting_columns = False
		self.__displayed_context_menu = False #for hildon

		#build list view
		self._feed_filter = self._feedlist.filter_new()
		self._feed_filter.set_visible_column(VISIBLE)
		self._widget.set_model(self._feed_filter)

		# Icon Column
		self._icon_renderer = gtk.CellRendererPixbuf()
		self._icon_column = gtk.TreeViewColumn(_('Icon'))
		self._icon_column.pack_start(self._icon_renderer, False)
		self._icon_column.set_attributes(self._icon_renderer, stock_id=STOCKID)
		if utils.RUNNING_HILDON:
			self._icon_column.set_sizing(gtk.TREE_VIEW_COLUMN_AUTOSIZE)
		else:
			self._icon_column.set_sizing(gtk.TREE_VIEW_COLUMN_FIXED)
			self._icon_column.set_min_width(32)
		self._widget.append_column(self._icon_column)

		# Feed Column
		renderer = gtk.CellRendererText()
		renderer.set_property("ellipsize", pango.ELLIPSIZE_END)
		self._feed_column = gtk.TreeViewColumn(_('Feeds'))
		self._feed_column.pack_start(renderer, True)
		self._feed_column.set_attributes(renderer, markup=MARKUPTITLE)
		self._feed_column.set_sizing(gtk.TREE_VIEW_COLUMN_FIXED)
		self._feed_column.set_resizable(True)
		self._feed_column.set_expand(True)
		self._widget.append_column(self._feed_column)

		# Articles column
		#a plain '' is meant here; _('') would fetch the gettext catalog header
		self._articles_column = gtk.TreeViewColumn('')
		self._articles_column.set_resizable(False)
		self._articles_column.pack_start(self._articles_renderer, False)
		self._articles_renderer = gtk.CellRendererText()
		self._articles_column.set_attributes(self._articles_renderer, markup=READINFO)
		self._articles_column.set_sizing(gtk.TREE_VIEW_COLUMN_AUTOSIZE)
		self._articles_column.set_expand(False)
		self._widget.append_column(self._articles_column)

		# Image Column
		feed_image_renderer = gtk.CellRendererPixbuf()
		self._image_column = gtk.TreeViewColumn(_('Image'))
		self._image_column.pack_start(feed_image_renderer, False)
		self._image_column.set_attributes(feed_image_renderer, pixbuf=PIXBUF)
		self._image_column.set_sizing(gtk.TREE_VIEW_COLUMN_FIXED)
		self._image_column.set_min_width(MAX_WIDTH + 10)
		self._image_column.set_max_width(MAX_WIDTH + 10)
		self._image_column.set_expand(False)
		if self._fancy:
			self._widget.append_column(self._image_column)
		self.resize_columns()

		#signals are MANUAL ONLY
		self._widget.get_selection().connect("changed", self._item_selection_changed)
		self._widget.connect("row-activated", self.on_row_activated)
		self._widget.connect("button-press-event", self._on_button_press_event)
		self._widget.connect("button-release-event", self._on_button_release_event)
		if utils.RUNNING_HILDON:
			self._widget.tap_and_hold_setup(menu=self._get_context_menu(False))

		self._handlers = []
		h_id = self._app.connect('feed-polled', self.__feed_polled_cb)
		self._handlers.append((self._app.disconnect, h_id))
		h_id = self._app.connect('feed-added', self.__feed_added_cb)
		self._handlers.append((self._app.disconnect, h_id))
		h_id = self._app.connect('feed-removed', self.__feed_removed_cb)
		self._handlers.append((self._app.disconnect, h_id))
		h_id = self._app.connect('feed-name-changed', self.__feed_name_changed_cb)
		self._handlers.append((self._app.disconnect, h_id))
		h_id = self._app.connect('entry-updated', self.__entry_updated_cb)
		self._handlers.append((self._app.disconnect, h_id))
		h_id = self._app.connect('tags-changed', self.__tags_changed_cb)
		self._handlers.append((self._app.disconnect, h_id))
		h_id = self._app.connect('state_changed', self.__state_changed_cb)
		self._handlers.append((self._app.disconnect, h_id))
		h_id = self._app.connect('entries-viewed', self.__entries_viewed_cb)
		self._handlers.append((self._app.disconnect, h_id))
		h_id = self._app.connect('entries-unviewed', self.__entries_unviewed_cb)
		self._handlers.append((self._app.disconnect, h_id))

		#init style
		if self._fancy:
			if utils.RUNNING_SUGAR:
				self._icon_renderer.set_property('stock-size', gtk.ICON_SIZE_SMALL_TOOLBAR)
			elif utils.RUNNING_HILDON:
				self._icon_renderer.set_property('stock-size', gtk.ICON_SIZE_BUTTON)
			else:
				self._icon_renderer.set_property('stock-size', gtk.ICON_SIZE_LARGE_TOOLBAR)
			self._widget.set_property('rules-hint', True)

	def finalize(self):
		for disconnector, h_id in self._handlers:
			disconnector(h_id)

	def set_entry_view(self, entry_view):
		h_id = entry_view.connect('entries-viewed', self.__entries_viewed_cb)
		self._handlers.append((entry_view.disconnect, h_id))

	def __feed_polled_cb(self, app, feed_id, update_data):
		if update_data.has_key('no_changes'):
			if update_data['no_changes']:
				self.update_feed_list(feed_id, ['icon','image'], update_data)
				return
		self.update_feed_list(feed_id, ['readinfo','icon','title','image'], update_data)

	def __feed_added_cb(self, app, feed_id, success):
		self.update_feed_list(feed_id, ['title'])

	def __feed_removed_cb(self, app, feed_id):
		self.remove_feed(feed_id)
		self.resize_columns()

	def __feed_name_changed_cb(self, app, feed_id, oldname, name):
		self.update_feed_list(feed_id, ['title'], {'title': name})
		self.resize_columns()

	def __tags_changed_cb(self, app, a):
		self.filter_all(False)

	def __entry_updated_cb(self, app, entry_id, feed_id):
		self.update_feed_list(feed_id, ['readinfo','icon'])
		for f in self._app.db.get_pointer_feeds(feed_id):
			self.update_feed_list(f, ['readinfo','icon'])

	#def __entry_selected_cb(self, feed_id, entry_id):
	#	self.mark_entries_read(feed_id, 1)

	def __entries_viewed_cb(self, app, viewlist):
		#logging.debug("feedlist entries viewed")
		for feed_id, id_list in viewlist:
			self.mark_entries_read(len(id_list), feed_id)
			for f in self._app.db.get_associated_feeds(feed_id):
				if f != feed_id:
					self.update_feed_list(f, ['readinfo','icon'])

	def __entries_unviewed_cb(self, app, viewlist):
		for feed_id, id_list in viewlist:
			self.mark_entries_read(0 - len(id_list), feed_id)
			for f in self._app.db.get_associated_feeds(feed_id):
				if f != feed_id:
					self.update_feed_list(f, ['readinfo','icon'])

	def __update_feed_count_cb(self, o, feed_id, count):
		#logging.debug("update %i -> %i" % (feed_id, count))
		self.update_feed_list(feed_id, update_what=['readinfo'], update_data={'unread_count': count})
		for f in self._app.db.get_associated_feeds(feed_id):
			if f != feed_id:
				self.update_feed_list(f, ['readinfo','icon'])

	def grab_focus(self):
		self._widget.grab_focus()

	def populate_feeds(self, callback=None, subset=ALL):
		"""With 100 feeds, this is starting to get slow (2-3 seconds).
		Speed helped with cache"""
		#DON'T gtk.iteration in this func!  Causes endless loops!
		#if utils.RUNNING_HILDON:
		#	self._articles_column.set_visible(False)
		if len(self._feedlist) == 0:
			#first fill out rough feedlist
			db_feedlist = self._app.db.get_feedlist()
			blank_pixbuf = gtk.gdk.Pixbuf(gtk.gdk.COLORSPACE_RGB, True, 8, 10, 10)
			blank_pixbuf.fill(0xffffff00)
			for feed_id, title, url in db_feedlist:
				if utils.RUNNING_HILDON:
					title_m = '%s' % title #markup lost in this copy
				elif self._fancy:
					title_m = title + "\n"
				else:
					title_m = title
				self._feedlist.append([title, title_m, feed_id, 'gtk-stock-blank', "",
					blank_pixbuf, False, 0, 0, 0, False, False, ""]) #assume invisible
			self.filter_all(False)
		gobject.idle_add(self._update_feeds_generator(callback, subset).next)
		#self._update_feeds_generator(subset)
		return False #in case this was called by the timeout below
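	# Illustrative sketch, not from the original source: the generator plus
	# idle_add pattern used by populate_feeds above and throughout this class.
	# gobject calls .next() once per idle cycle; yielding True keeps the job
	# scheduled and yielding False unschedules it.  update_one_row is a
	# placeholder for the per-row work:
	#
	#	def _work_generator(self):
	#		for row in self._feedlist:
	#			update_one_row(row)
	#			yield True   # resume on the next idle cycle
	#		yield False      # done; idle_add stops calling us
	#
	#	gobject.idle_add(self._work_generator().next)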
	def _update_feeds_generator(self, callback=None, subset=ALL):
		"""A generator that updates the feed list.  Called from populate_feeds"""
		selection = self._widget.get_selection()
		#selected = self.get_selected()
		feed_cache = self._app.db.get_feed_cache()
		db_feedlist = self._app.db.get_feedlist()
		blank_pixbuf = gtk.gdk.Pixbuf(gtk.gdk.COLORSPACE_RGB, True, 8, 10, 10)
		blank_pixbuf.fill(0xffffff00)

		# While populating, allow articles column to autosize
		#self._articles_column.set_sizing(gtk.TREE_VIEW_COLUMN_AUTOSIZE)
		self._articles_column.set_min_width(50)
		self._feed_column.set_resizable(False)
		self._feed_column.set_expand(False)
		self._feed_column.set_min_width(self._feed_column.get_width())

		# create a sorted list of feedids with the visible ones first
		# if not ALL, then we don't sort and it's all fine
		i = -1
		loadlist = []
		for f in self._feedlist:
			i += 1
			loadlist.append((not f[VISIBLE], i))
		if subset == ALL:
			loadlist.sort()
		j = -1
		for vis, i in loadlist:
			feed_id, title, url = db_feedlist[i]
			if self._cancel_load[0]:
				break
			j += 1
			if subset == DOWNLOADED:
				flag = self._feedlist[i][FLAG]
				if flag & ptvDB.F_DOWNLOADED == 0 and flag & ptvDB.F_PAUSED == 0:
					print "not downloaded and not paused, skipping"
					continue
			elif subset == VISIBLE:
				if not self._feedlist[i][VISIBLE]:
					continue
			elif subset == NOTVISIBLE:
				if self._feedlist[i][VISIBLE]:
					continue
			if feed_cache is not None:
				try:
					cached = feed_cache[i]
					flag = cached[1]
					unviewed = cached[2]
					entry_count = cached[3]
					pollfail = cached[4]
					m_first_entry_title = cached[5]
				except:
					#bad cache, trigger test below
					entry_count = None
			else:
				feed_info = self._app.db.get_feed_verbose(feed_id)
				unviewed = feed_info['unread_count']
				flag = feed_info['important_flag']
				pollfail = feed_info['poll_fail']
				entry_count = feed_info['entry_count']
				m_first_entry_title = ""
			if entry_count == 0 or entry_count is None:
				#this is a good indication that the cache is bad
				feed_info = self._app.db.get_feed_verbose(feed_id)
				unviewed = feed_info['unread_count']
				flag = feed_info['important_flag']
				pollfail = feed_info['poll_fail']
				entry_count = feed_info['entry_count']
				m_first_entry_title = ""
			if self._feedlist[i][FLAG] != 0:
				flag = self._feedlist[i][FLAG] #don't overwrite flag (race condition)
			if unviewed == 0 and flag & ptvDB.F_UNVIEWED:
				print "WARNING: zero unread articles but flag says there should be some"
				flag -= ptvDB.F_UNVIEWED
			if self.filter_setting == DOWNLOADED:
				visible = bool(flag & ptvDB.F_DOWNLOADED)
			else:
				visible = self._feedlist[i][VISIBLE]
			if self._fancy:
				if visible:
					if len(m_first_entry_title) == 0:
						m_first_entry_title = self._app.db.get_first_entry_title(feed_id, True)
					m_details_loaded = True
				else:
					if len(m_first_entry_title) > 0:
						m_details_loaded = True
					else:
						m_details_loaded = False
				if utils.RUNNING_HILDON:
					m_pixbuf = self._icon_manager.get_icon_pixbuf(feed_id, 64, 64, MIN_SIZE, MIN_SIZE)
				else:
					m_pixbuf = self._icon_manager.get_icon_pixbuf(feed_id) #,
						#MAX_WIDTH, MAX_HEIGHT, MIN_SIZE, MIN_SIZE)
				model, iter = selection.get_selected()
				try:
					sel = model[iter][FEEDID]
				except:
					sel = -1
				m_title = self._get_fancy_markedup_title(title, m_first_entry_title,
					unviewed, entry_count, flag, feed_id)
				m_readinfo = self._get_markedup_title("(%d/%d)\n" % (unviewed, entry_count), flag)
			else:
				m_title = self._get_markedup_title(title, flag)
				if utils.RUNNING_HILDON:
					m_readinfo = self._get_markedup_title("(%d)" % (unviewed), flag)
				else:
					m_readinfo = self._get_markedup_title("(%d/%d)" % (unviewed, entry_count), flag)
				m_pixbuf = blank_pixbuf
				m_first_entry_title = ""
				m_details_loaded = False
			icon = self._get_icon(flag)
			if pollfail:
				if icon == 'gtk-harddisk' or icon == 'gnome-stock-blank':
					icon = 'gtk-dialog-error'
			self._feedlist[i] = [title, m_title, feed_id, icon, m_readinfo, m_pixbuf,
				m_details_loaded, unviewed, entry_count, flag, visible, pollfail,
				m_first_entry_title]
			if self.filter_setting == DOWNLOADED and visible:
				self._feed_filter.refilter()
			self._app.main_window.update_progress_bar(float(j) / len(db_feedlist), MainWindow.U_LOADING)
			yield True
		self._app.main_window.update_progress_bar(-1, MainWindow.U_LOADING)
		# Once we are done populating, set size to fixed, otherwise we get
		# a nasty flicker when we click on feeds
		self.resize_columns()
		if self._fancy:
			gobject.timeout_add(500, self._load_details(visible_only=False).next)
		if not self._cancel_load[0]:
			if self._fancy:
				gobject.idle_add(self._load_details().next)
			#if selected:
			#	self.set_selected(selected)
			if callback is not None:
				try:
					callback()
				except:
					pass
		else:
			self._cancel_load[0] = False
		yield False

	def resize_columns(self):
		self._reset_articles_column()

	def _reset_articles_column(self, harsh=False):
		#temporarily allow articles column to size itself, then set it
		#to fixed again to avoid flicker.
		#don't allow us to resize twice in a row (before idle_add can act)
		if self.__resetting_columns:
			return
		self.__resetting_columns = True
		self._articles_column.set_sizing(gtk.TREE_VIEW_COLUMN_AUTOSIZE)
		self._articles_column.set_min_width(50)
		self._feed_column.set_resizable(True)
		self._feed_column.set_expand(True)
		self._feed_column.set_min_width(0)
		self._widget.columns_autosize()
		def _finish_resize():
			self._articles_column.set_sizing(gtk.TREE_VIEW_COLUMN_FIXED)
			self._articles_column.set_min_width(self._articles_column.get_width())
			self.__resetting_columns = False
			return False
		gobject.idle_add(_finish_resize)
	def update_feed_list(self, feed_id=None, update_what=None, update_data=None, recur_ok=True):
		#returns True if this is the already-displayed feed
		"""updates the feed list.  Right now uses db to get flags, entrylist
		(for unread count), pollfail.  We should just get the flag, unread
		count, and poll fail, and then figure out: icon, markup, and numbers.
		update_data would be a dict with unreadcount, flag list, and pollfail.
		update_what is a list of strings saying what we want to update.
		it will go to the db for info unless the value is already in update_data"""
		#logging.debug("updating feed list: %s", str(update_what))
		if feed_id is None:
			if self._last_feed is None:
				return
			feed_id = self._feedlist[self._last_feed][FEEDID]
		if update_what is None:
			update_what = ['readinfo','icon','title']
		if update_data is None:
			update_data = {}
		if 'readinfo' in update_what and 'title' not in update_what:
			update_what.append('title') #need this too
		try:
			feed = self._feedlist[self.find_index_of_item(feed_id)]
		except:
			logging.warning("tried to update feed not in list: %i, %s, %s, %s" \
				% (feed_id, str(update_what), str(update_data), str(recur_ok)))
			return
		need_filter = False #some updates will require refiltering
		need_resize = False
		if update_what == ['icon'] and update_data.has_key('icon'):
			#FIXME: hack for download notification
			feed[STOCKID] = update_data['icon']
			return
		if 'title' in update_what or 'icon' in update_what:
			if not update_data.has_key('flag_list'):
				update_data['flag_list'] = self._app.db.get_entry_flags(feed_id)
			updated = 0
			unviewed = 0
			downloaded = 0
			for flag in update_data['flag_list']:
				if flag & ptvDB.F_UNVIEWED == ptvDB.F_UNVIEWED:
					unviewed = unviewed + 1
				if flag & ptvDB.F_DOWNLOADED or flag & ptvDB.F_PAUSED:
					downloaded = 1
			flag = self._pick_important_flag(feed_id, update_data['flag_list'])
		if 'image' in update_what and self._fancy:
			if utils.RUNNING_HILDON:
				feed[PIXBUF] = self._icon_manager.get_icon_pixbuf(feed_id, 64, 64, MIN_SIZE, MIN_SIZE)
			else:
				feed[PIXBUF] = self._icon_manager.get_icon_pixbuf(feed_id) #,
					#MAX_WIDTH, MAX_HEIGHT, MIN_SIZE, MIN_SIZE)
		if 'readinfo' in update_what:
			#db_unread_count = self._app.db.get_unread_count(feed_id) #need it always for FIXME below
			if not update_data.has_key('unread_count'):
				update_data['unread_count'] = self._app.db.get_unread_count(feed_id) #, db_unread_count)
			if update_data['unread_count'] > 0:
				if feed[FLAG] & ptvDB.F_UNVIEWED == 0:
					feed[FLAG] = feed[FLAG] + ptvDB.F_UNVIEWED
			else:
				if feed[FLAG] & ptvDB.F_UNVIEWED:
					feed[FLAG] = feed[FLAG] - ptvDB.F_UNVIEWED
			feed[UNREAD] = update_data['unread_count']
			feed[TOTAL] = len(update_data['flag_list'])
			if utils.RUNNING_HILDON and not self._fancy:
				readinfo_string = "(%d)" % (update_data['unread_count'],)
			else:
				readinfo_string = "(%d/%d)" % (update_data['unread_count'], len(update_data['flag_list']))
			if self._fancy:
				readinfo_string += "\n"
			if readinfo_string != feed[READINFO]:
				feed[READINFO] = self._get_markedup_title(readinfo_string, flag)
				#print feed[MARKUPTITLE], feed[READINFO]
				need_resize = True
			if self._filter_unread:
				if self.filter_test_feed(feed_id):
					#no sense testing the filter if we won't see it
					need_filter = True
		if 'title' in update_what:
			selected = self.get_selected()
			if not update_data.has_key('title'):
				update_data['title'] = self._app.db.get_feed_title(feed_id)
			old_title_len = len(feed[TITLE])
			new_title_len = len(update_data['title'])
			# don't update feed[TITLE] yet, we need these data first
			if self._fancy:
				try:
					feed[FIRSTENTRYTITLE] = self._app.db.get_first_entry_title(feed_id, True)
				except:
					feed[FIRSTENTRYTITLE] = ""
				feed[MARKUPTITLE] = self._get_fancy_markedup_title(update_data['title'],
					feed[FIRSTENTRYTITLE], feed[UNREAD], feed[TOTAL], flag, feed[FEEDID])
			else:
				feed[MARKUPTITLE] = self._get_markedup_title(update_data['title'], flag)
			if feed[TITLE] != update_data['title']:
				feed[TITLE] = update_data['title']
				try:
					old_iter = self._feedlist.get_iter((self.find_index_of_item(feed_id),))
					new_iter = self._feedlist.get_iter(([f[0] for f in self._app.db.get_feedlist()].index(feed_id),))
					self._feedlist.move_after(old_iter, new_iter)
					if selected == feed_id:
						self._widget.scroll_to_cell((self.find_index_of_item(feed_id),))
				except:
					print "Error finding feed for update"
				need_filter = True
			#columns_autosize produces a flicker, so only do it if we need to
			if abs(new_title_len - old_title_len) > 5:
				need_resize = True
				#self.resize_columns()
		if 'icon' in update_what:
			if not update_data.has_key('pollfail'):
				update_data['pollfail'] = self._app.db.get_feed_poll_fail(feed_id)
			feed[POLLFAIL] = update_data['pollfail']
			feed[STOCKID] = self._get_icon(flag)
			if update_data['pollfail']:
				#print update_data
				if feed[STOCKID] == 'gtk-harddisk' or feed[STOCKID] == 'gnome-stock-blank':
					feed[STOCKID] = 'gtk-dialog-error'
			feed[FLAG] = flag
		if self.filter_setting == DOWNLOADED:
			if downloaded == 0:
				need_filter = True
		if need_filter and self._state != S_SEARCH: #not self._showing_search
			self._filter_one(feed)
		if need_resize:
			self.resize_columns()

	def mark_entries_read(self, num_to_mark, feed_id=None):
		"""alters the number of unread entries by num_to_mark.
		if negative, marks some as unread"""
		#there's some trickiness here.  The model for the selection is
		#self._feed_filter, not self._feedlist, so we can't write to items
		#in that model.  We have to go back and find where this feed is in the
		#original model.
		if feed_id is None:
			s = self._widget.get_selection().get_selected()
			if s is None:
				return
			model, iter = s
			if iter is None:
				return
			unfiltered_iter = model.convert_iter_to_child_iter(iter)
			feed = self._feedlist[unfiltered_iter]
		else:
			feed = self._feedlist[self.find_index_of_item(feed_id)]
		#sanity check
		if feed[UNREAD] - num_to_mark < 0 or feed[UNREAD] - num_to_mark > feed[TOTAL]:
			print "WARNING: trying to mark more or less than we have:", feed[TITLE], feed[UNREAD], num_to_mark
			print feed[UNREAD], feed[TOTAL], num_to_mark
			self.update_feed_list(feed[FEEDID], ['readinfo'])
			return
		feed[UNREAD] -= num_to_mark
		if feed[UNREAD] == 0 and feed[FLAG] & ptvDB.F_UNVIEWED:
			feed[FLAG] -= ptvDB.F_UNVIEWED
		if feed[UNREAD] > 0 and feed[FLAG] & ptvDB.F_UNVIEWED == 0:
			feed[FLAG] += ptvDB.F_UNVIEWED
		if utils.RUNNING_HILDON and not self._fancy:
			readinfo_string = "(%d)" % (feed[UNREAD],)
		else:
			readinfo_string = "(%d/%d)" % (feed[UNREAD], feed[TOTAL])
		if self._fancy:
			readinfo_string += "\n"
			feed[MARKUPTITLE] = self._get_fancy_markedup_title(feed[TITLE],
				feed[FIRSTENTRYTITLE], feed[UNREAD], feed[TOTAL], feed[FLAG], feed[FEEDID])
		else:
			feed[MARKUPTITLE] = self._get_markedup_title(feed[TITLE], feed[FLAG])
		feed[READINFO] = self._get_markedup_title(readinfo_string, feed[FLAG])
		if self._filter_unread:
			self._filter_one(feed)

	def show_search_results(self, results=[]):
		"""shows the feeds in the list 'results'"""
		if self._state != S_SEARCH:
			print "not in search state, returning"
			return
		if self._last_feed is not None:
			old_item = self._feedlist[self._last_feed]
			old_item[MARKUPTITLE] = self._get_fancy_markedup_title(old_item[TITLE],
				old_item[FIRSTENTRYTITLE], old_item[UNREAD], old_item[TOTAL],
				old_item[FLAG], old_item[FEEDID])
		if results is None:
			results = []
		#print results[0]
		if len(results) == 0:
			for feed in self._feedlist:
				feed[VISIBLE] = 0
			self._feed_filter.refilter()
			return
		for feed in self._feedlist:
			if feed[FEEDID] in results:
				feed[VISIBLE] = 1
			else:
				feed[VISIBLE] = 0
		id_list = [feed[FEEDID] for feed in self._feedlist]
		def sorter(a, b):
			if a[VISIBLE] != b[VISIBLE]:
				return b[VISIBLE] - a[VISIBLE]
			if a[VISIBLE] == 1:
				return results.index(a[FEEDID]) - results.index(b[FEEDID])
			else:
				return id_list.index(a[FEEDID]) - id_list.index(b[FEEDID])
		#convert to list
		f_list = list(self._feedlist)
		#we sort the new feed list as is
		f_list.sort(sorter)
		#we go through the new feed list, and for each id find its old index
		i_list = []
		for f in f_list:
			i_list.append(id_list.index(f[FEEDID]))
		#we now have a list of old indexes in the new order
		self._feedlist.reorder(i_list)
		self._feed_filter.refilter()
		if self._fancy:
			gobject.idle_add(self._load_details().next)
		self._va.set_value(0)
		self._widget.get_selection().unselect_all()
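	# Illustrative sketch, not from the original source: the entry flags are a
	# bitmask, and mark_entries_read above can toggle F_UNVIEWED with plain
	# +/- only because it first checks whether the bit is set:
	#
	#	flag = ptvDB.F_DOWNLOADED
	#	if flag & ptvDB.F_UNVIEWED == 0:
	#		flag += ptvDB.F_UNVIEWED   # safe: the bit is known to be clear
	#	if flag & ptvDB.F_UNVIEWED:
	#		flag -= ptvDB.F_UNVIEWED   # safe: the bit is known to be set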
	def _unset_state(self, data=True):
		if self._state == S_SEARCH:
			gonna_filter = data
			showing_feed = self.get_selected()
			id_list = [feed[FEEDID] for feed in self._feedlist]
			f_list = list(self._feedlist)
			def alpha_sorter(x, y):
				if x[TITLE].upper() > y[TITLE].upper():
					return 1
				if x[TITLE].upper() == y[TITLE].upper():
					return 0
				return -1
			f_list.sort(alpha_sorter)
			i_list = []
			for f in f_list:
				i_list.append(id_list.index(f[FEEDID]))
			self._feedlist.reorder(i_list)
			if showing_feed is not None:
				#self._app.display_feed(showing_feed)
				self.emit('feed-selected', showing_feed)
				if not self.filter_test_feed(showing_feed):
					self._app.main_window.set_active_filter(ALL)
				self.set_selected(showing_feed)
			elif gonna_filter == False:
				self._app.main_window.set_active_filter(ALL)
				#self._app.display_entry(None)
				self.emit('no-feed-selected')

	def __state_changed_cb(self, app, newstate, data=None):
		d = {penguintv.DEFAULT: S_DEFAULT,
			 penguintv.MANUAL_SEARCH: S_SEARCH,
			 penguintv.TAG_SEARCH: S_SEARCH,
			 penguintv.MAJOR_DB_OPERATION: S_MAJOR_DB_OPERATION}
		newstate = d[newstate]
		if newstate == self._state:
			return
		self._unset_state(data)
		self._state = newstate

	def filter_all(self, keep_misfiltered=True):
		if utils.HAS_SEARCH and self.filter_setting == SEARCH:
			print "not filtering, we have search results"
			return False #not my job
		#gtk.gdk.threads_enter()
		selected = self.get_selected()
		index = self.find_index_of_item(selected)
		#stupid exception for when we don't have search
		if self.filter_setting > SEARCH: # or (not utils.HAS_SEARCH and self.filter_setting == SEARCH):
			feeds_with_tag = self._app.db.get_feeds_for_tag(self.filter_name)
		i = -1
		for feed in self._feedlist:
			i = i + 1
			flag = feed[FLAG]
			passed_filter = False
			if self.filter_setting == DOWNLOADED:
				if flag & ptvDB.F_DOWNLOADED or flag & ptvDB.F_PAUSED:
					passed_filter = True
			elif self.filter_setting == NOTIFY:
				opts = self._app.db.get_flags_for_feed(feed[FEEDID])
				if opts & ptvDB.FF_NOTIFYUPDATES:
					passed_filter = True
			elif self.filter_setting == ALL:
				passed_filter = True
			else:
				#tags = self._app.db.get_tags_for_feed(feed[FEEDID])
				#if tags:
				#	if self.filter_name in tags:
				#		passed_filter = True
				if feed[FEEDID] in feeds_with_tag:
					passed_filter = True
			#so now we know if we passed the main filter, but we need to test
			#for special cases where we keep it anyway.
			#also, we still need to test for unviewed
			if i == index and selected is not None:
				#if it's the selected feed, we have to be careful
				if keep_misfiltered:
					#some cases when we want to keep the current feed visible
					if self._filter_unread == True and flag & ptvDB.F_UNVIEWED == 0:
						#if it still fails the unviewed test
						passed_filter = True #keep it
						self._selecting_misfiltered = True
					elif self.filter_setting == DOWNLOADED and flag & ptvDB.F_DOWNLOADED == 0 and flag & ptvDB.F_PAUSED == 0:
						passed_filter = True
						self._selecting_misfiltered = True
					elif self.filter_setting == DOWNLOADED and flag & ptvDB.F_DOWNLOADING:
						passed_filter = True
						self._selecting_misfiltered = True
				if not passed_filter:
					self._widget.get_selection().unselect_all()
					#and clear out the entry list and entry view
					if self._fancy:
						feed[MARKUPTITLE] = self._get_fancy_markedup_title(feed[TITLE],
							feed[FIRSTENTRYTITLE], feed[UNREAD], feed[TOTAL], feed[FLAG], feed[FEEDID])
					#self._app.display_feed(-1)
					self.emit('no-feed-selected')
			else:
				#if it's not the selected feed
				if self._filter_unread == True and flag & ptvDB.F_UNVIEWED == 0:
					#and it fails unviewed
					passed_filter = False #see ya
			if feed[VISIBLE] != passed_filter:
				feed[VISIBLE] = passed_filter
				#note, this seems to change the selection!
		self._feed_filter.refilter()
		self.resize_columns()
		#gtk.gdk.threads_leave()
		return False
	def _filter_one(self, feed, keep_misfiltered=True):
		if utils.HAS_SEARCH and self.filter_setting == SEARCH:
			print "not filtering, we have search results"
			return #not my job
		selected = self.get_selected()
		s_index = self.find_index_of_item(selected)
		feed_index = self.find_index_of_item(feed[FEEDID])
		flag = feed[FLAG]
		passed_filter = False
		if self.filter_setting == DOWNLOADED:
			if flag & ptvDB.F_DOWNLOADED or flag & ptvDB.F_PAUSED:
				passed_filter = True
		elif self.filter_setting == NOTIFY:
			opts = self._app.db.get_flags_for_feed(feed[FEEDID])
			if opts & ptvDB.FF_NOTIFYUPDATES:
				passed_filter = True
		elif self.filter_setting == ALL:
			passed_filter = True
		else:
			tags = self._app.db.get_tags_for_feed(feed[FEEDID])
			if tags:
				if self.filter_name in tags:
					passed_filter = True
		#so now we know if we passed the main filter, but we need to test
		#for special cases where we keep it anyway.
		#also, we still need to test for unviewed
		if feed_index == s_index and selected is not None:
			#if it's the selected feed, we have to be careful
			if keep_misfiltered:
				#some cases when we want to keep the current feed visible
				if self._filter_unread == True and flag & ptvDB.F_UNVIEWED == 0:
					#if it still fails the unviewed test
					passed_filter = True #keep it
					self._selecting_misfiltered = True
				elif self.filter_setting == DOWNLOADED and flag & ptvDB.F_DOWNLOADED == 0 and flag & ptvDB.F_PAUSED == 0:
					passed_filter = True
					self._selecting_misfiltered = True
				elif self.filter_setting == DOWNLOADED and flag & ptvDB.F_DOWNLOADING:
					passed_filter = True
					self._selecting_misfiltered = True
			if not passed_filter:
				self._widget.get_selection().unselect_all()
				#and clear out the entry list and entry view
				#self._app.display_feed(-1)
				self.emit('no-feed-selected')
		else:
			#if it's not the selected feed
			if self._filter_unread == True and flag & ptvDB.F_UNVIEWED == 0:
				#and it fails unviewed
				passed_filter = False #see ya
		if feed[VISIBLE] != passed_filter:
			feed[VISIBLE] = passed_filter
			#note, this seems to change the selection!
			self._feed_filter.refilter()

	def _load_details(self, visible_only=True):
		if visible_only:
			if self._loading_details == 1:
				yield False
			self._loading_details = 1
		else:
			if self._loading_details > 0:
				yield False
			self._loading_details = 2
		for row in self._feedlist:
			if self._cancel_load[1]:
				break
			if not visible_only and self._loading_details == 1:
				break
			if (row[VISIBLE] or not visible_only) and not row[DETAILS_LOADED]:
				then = time.time()
				try:
					row[FIRSTENTRYTITLE] = self._app.db.get_first_entry_title(row[FEEDID], True)
				except:
					row[FIRSTENTRYTITLE] = ""
				now = time.time()
				if now - then > 2 and not visible_only:
					#print "too slow, quit"
					break
				#row[PIXBUF] = self._get_pixbuf(row[FEEDID])
				model, iter = self._widget.get_selection().get_selected()
				row[DETAILS_LOADED] = True
				try:
					selected = model[iter][FEEDID]
				except:
					selected = -1
				row[MARKUPTITLE] = self._get_fancy_markedup_title(row[TITLE],
					row[FIRSTENTRYTITLE], row[UNREAD], row[TOTAL], row[FLAG], row[FEEDID])
				#self.resize_columns()
				yield True
		if self._cancel_load[1]:
			self._cancel_load[1] = False
		self._loading_details = 0
		if visible_only and len(self._feedlist) > 0 and self._fancy:
			#print "now loading everything else"
			gobject.timeout_add(500, self._load_details(visible_only=False).next)
		yield False

	def filter_test_feed(self, feed_id):
		"""Tests a feed against the filters (although _not_ unviewed status testing)"""
		passed_filter = False
		try:
			flag = self._feedlist[self.find_index_of_item(feed_id)][FLAG]
		except:
			return False
		if self.filter_setting == DOWNLOADED:
			if flag & ptvDB.F_DOWNLOADED or flag & ptvDB.F_PAUSED:
				passed_filter = True
		elif self.filter_setting == NOTIFY:
			#was feed[FEEDID], but no 'feed' is bound in this method
			opts = self._app.db.get_flags_for_feed(feed_id)
			if opts & ptvDB.FF_NOTIFYUPDATES:
				passed_filter = True
		elif self.filter_setting == ALL:
			passed_filter = True
		else:
			tags = self._app.db.get_tags_for_feed(feed_id)
			if tags:
				if self.filter_name in tags:
					passed_filter = True
		return passed_filter

	def on_row_activated(self, treeview, path, view_column):
		if utils.RUNNING_HILDON:
			#much too easy to doubleclick on hildon, disable
			return
		index = path[0]
		model = treeview.get_model()
		link = self._app.db.get_feed_info(model[index][FEEDID])['link']
		if link is None:
			dialog = gtk.Dialog(title=_("No Homepage"), parent=None,
				flags=gtk.DIALOG_MODAL,
				buttons=(gtk.STOCK_OK, gtk.RESPONSE_ACCEPT))
			label = gtk.Label(_("There is no homepage associated with this feed.\nYou can set one in the feed properties."))
			dialog.vbox.pack_start(label, True, True, 0)
			label.show()
			dialog.run()
			dialog.hide()
			del dialog
			return #don't emit a None link below
		#self._app.activate_link(link)
		self.emit('link-activated', link)

	def set_filter(self, new_filter, name):
		self.filter_setting = new_filter
		self.filter_name = name
		#if new_filter != SEARCH and self._state == S_SEARCH:
		#	print "hope we also changed state"
		#	self._app.set_state(penguintv.DEFAULT)
		self.filter_all(False)
		if self._fancy:
			gobject.idle_add(self._load_details().next)
		self._va.set_value(0)
		self.resize_columns()

	def set_fancy(self, fancy):
		if fancy == self._fancy:
			return #no need
		if self._state == S_MAJOR_DB_OPERATION:
			self.interrupt()
			while gtk.events_pending():
				gtk.main_iteration()
		#self._app.set_state(penguintv.MAJOR_DB_OPERATION)
		self.emit('state-change', penguintv.MAJOR_DB_OPERATION)
		self._fancy = fancy
		if self._fancy:
			self._widget.append_column(self._image_column)
			self._icon_renderer.set_property('stock-size', gtk.ICON_SIZE_LARGE_TOOLBAR)
			self._widget.set_property('rules-hint', True)
		else:
			self._widget.remove_column(self._image_column)
			self._icon_renderer.set_property('stock-size', gtk.ICON_SIZE_SMALL_TOOLBAR)
			self._widget.set_property('rules-hint', False)
		if self._state == S_SEARCH:
			#self._app.set_state(penguintv.DEFAULT)
			self.emit('state-change', penguintv.DEFAULT)
		self._app.write_feed_cache()
		self.clear_list()
		self.populate_feeds(self._app._done_populating)
		self.resize_columns()

	def set_unread_toggle(self, active):
		if self._state == S_SEARCH:
			return
		self._filter_unread = active
		self.filter_all(False)
		if self._fancy:
			gobject.idle_add(self._load_details().next)
		self._va.set_value(0)

	def clear_list(self):
		self._feedlist.clear()

	def add_feed(self, feed_id):
		newlist = self._app.db.get_feedlist()
		index = [f[0] for f in newlist].index(feed_id)
		feed = newlist[index]
		#print "-----------ADDFEED---------"
		#print feed
		#print index
		#print newlist
		p = gtk.gdk.Pixbuf(gtk.gdk.COLORSPACE_RGB, True, 8, 10, 10)
		p.fill(0xffffff00)
		#insert, not append
		self._feedlist.insert(index, [feed[1], feed[1], feed[0], 'gnome-stock-blank', "",
			p, False, 1, 1, 0, True, False, ""])
		self.update_feed_list(feed_id)

	def remove_feed(self, feed_id):
		try:
			if feed_id == self._feedlist[self._last_feed][FEEDID]:
				self._last_feed = None
			self._feedlist.remove(self._feedlist.get_iter((self.find_index_of_item(feed_id),)))
		except:
			print "Error: feed not in list"

	def _on_button_press_event(self, widget, event):
		if event.button == 3: #right click
			self.do_context_menu(widget, event)

	def _on_button_release_event(self, widget, event):
		if self.__displayed_context_menu:
			self.__displayed_context_menu = False
			return
		if event.button == 1:
			self.emit('feed-clicked')
= gtk.ImageMenuItem(_("_Remove Feed")) img = gtk.image_new_from_stock('gtk-remove',gtk.ICON_SIZE_MENU) item.set_image(img) item.connect('activate',self._app.main_window.on_remove_feed_activate) if self._state == S_MAJOR_DB_OPERATION: item.set_sensitive(False) menu.append(item) separator = gtk.SeparatorMenuItem() menu.append(separator) if not is_filter: if utils.HAS_SEARCH: item = gtk.MenuItem(_("_Create Feed Filter")) item.connect('activate',self._app.main_window.on_add_feed_filter_activate) if self._state == S_MAJOR_DB_OPERATION: item.set_sensitive(False) menu.append(item) item = gtk.ImageMenuItem('gtk-properties') item.connect('activate',self._app.main_window.on_feed_properties_activate) menu.append(item) else: item = gtk.ImageMenuItem('gtk-properties') item.connect('activate',self._app.main_window.on_feed_filter_properties_activate) if not utils.HAS_SEARCH: item.set_sensitive(False) menu.append(item) menu.show_all() def realized(o): self.__displayed_context_menu = True menu.connect('realize', realized) return menu def do_context_menu(self, widget, event): path = widget.get_path_at_pos(int(event.x),int(event.y)) model = widget.get_model() if path is None: #nothing selected return selected = model[path[0]][FEEDID] is_filter = self._app.db.is_feed_filter(selected) menu = self._get_context_menu(is_filter) menu.popup(None,None,None, event.button,event.time) def _get_icon(self, flag): if flag & ptvDB.F_ERROR == ptvDB.F_ERROR: return 'gtk-dialog-error' if flag & ptvDB.F_DOWNLOADING == ptvDB.F_DOWNLOADING: return 'gtk-execute' if flag & ptvDB.F_DOWNLOADED == ptvDB.F_DOWNLOADED: return 'gtk-harddisk' if flag & ptvDB.F_PAUSED: return 'gtk-media-pause' return 'gnome-stock-blank' def _get_markedup_title(self, title, flag): if not title: return _("Please wait...") if utils.RUNNING_SUGAR: title=''+title+'' elif utils.RUNNING_HILDON: title=''+title+'' try: if flag & ptvDB.F_UNVIEWED == ptvDB.F_UNVIEWED: title=""+utils.my_quote(title)+"" except: return title return title def _get_fancy_markedup_title(self, title, first_entry_title, unread, total, flag, feed_id, selected=None): #logging.debug("fancy title: %s %s %i %i %i %i", title, first_entry_title, unread, total, flag, feed_id) if selected is None: selection = self._widget.get_selection() model, iter = selection.get_selected() try: sel = model[iter][FEEDID] except: sel = -1 selected = feed_id == sel if not title: return _("Please wait...") try: if utils.RUNNING_HILDON: if not selected: title = ''+utils.my_quote(title)+'\n'+utils.my_quote(first_entry_title)+'' else: title = ''+utils.my_quote(title)+'\n'+utils.my_quote(first_entry_title)+'' else: if not selected: title = utils.my_quote(title)+'\n'+utils.my_quote(first_entry_title)+'' else: title = utils.my_quote(title)+'\n'+utils.my_quote(first_entry_title)+'' if flag & ptvDB.F_UNVIEWED == ptvDB.F_UNVIEWED: if unread == 0: logging.warning("Flag says there are unviewed, but count says no. 
not setting bold") else: title=""+title+'' except: return title return title def _pick_important_flag(self, feed_id, flag_list): """go through entries and pull out most important flag""" if len(flag_list)==0: return 0 entry_count = len(flag_list) important_flag = 0 media_exists = 0 for flag in flag_list: if flag & ptvDB.F_DOWNLOADED == ptvDB.F_DOWNLOADED: media_exists=1 break flag_list.sort() best_flag = flag_list[-1] if best_flag & ptvDB.F_DOWNLOADED == 0 and media_exists==1: #if there is an unread text-only entry, but all viewed media, #we need a special case (mixing flags from different entries) return best_flag + ptvDB.F_DOWNLOADED else: return best_flag def _item_selection_changed(self, selection): if self._fancy and self._last_feed is not None: try: old_item = self._feedlist[self._last_feed] old_item[MARKUPTITLE] = self._get_fancy_markedup_title(old_item[TITLE],old_item[FIRSTENTRYTITLE],old_item[UNREAD], old_item[TOTAL], old_item[FLAG], old_item[FEEDID], False) except: pass s = selection.get_selected() if s: model, iter = s if iter is None: self.emit('no-feed-selected') return unfiltered_iter = model.convert_iter_to_child_iter(iter) feed = self._feedlist[unfiltered_iter] else: self.emit('no-feed-selected') return self._last_feed=unfiltered_iter self._select_after_load=None if self._fancy: feed[MARKUPTITLE] = self._get_fancy_markedup_title(feed[TITLE],feed[FIRSTENTRYTITLE],feed[UNREAD], feed[TOTAL], feed[FLAG], feed[FEEDID], True) try: if self._feedlist[self.find_index_of_item(feed[FEEDID])][POLLFAIL]: self._app.display_custom_entry(""+_("There was an error trying to poll this feed.")+"") else: self._app.undisplay_custom_entry() except: self._app.undisplay_custom_entry() #if self._showing_search: if self._state == S_SEARCH: if feed[FEEDID] == self._last_selected: return self._last_selected = feed[FEEDID] if not self._app.entrylist_selecting_right_now(): self.emit('search-feed-selected', feed[FEEDID]) return if feed[FEEDID] == self._last_selected: self.emit('feed-selected', feed[FEEDID]) else: self._last_selected = feed[FEEDID] self.emit('feed-selected', feed[FEEDID]) if self._selecting_misfiltered and feed[FEEDID]!=None: self._selecting_misfiltered = False gobject.timeout_add(250, self.filter_all) def get_selected(self, selection=None): if selection==None: try: s = self._widget.get_selection().get_selected() except AttributeError: return None else: s = selection.get_selected() if s: model, iter = s if iter is None: return None path = model.get_path(iter) index = path[0] return model[index][FEEDID] else: return None def set_selected(self, feed_id): if feed_id is None: self._widget.get_selection().unselect_all() return visible = [f[FEEDID] for f in self._feedlist if f[VISIBLE]] index=None try: index = visible.index(feed_id) except: pass if index is None: if self.filter_setting != ALL: self._app.main_window.set_active_filter(ALL) #hmm.. self.set_selected(feed_id) return else: self._widget.get_selection().unselect_all() else: #FIXME: why are we crashing here sometimes??? 
self._widget.get_selection().select_path((index,)) self._widget.scroll_to_cell((index,)) def find_index_of_item(self, feed_id): try: i=-1 for feed in self._feedlist: i+=1 if feed_id == feed[FEEDID]: return i return None except: return None def get_feed_cache(self): return [[f[FEEDID],f[FLAG],f[UNREAD],f[TOTAL],f[POLLFAIL],f[FIRSTENTRYTITLE]] for f in self._feedlist] def interrupt(self): self._cancel_load = [True,True] PenguinTV-4.2.0/penguintv/Poller.py0000755000000000000000000000756411303252610014120 0ustar #!/usr/bin/env python #Out-of-process poller for PenguinTV #returns data over dbus import os import sys import logging import traceback try: import hildon RUNNING_HILDON = True except: RUNNING_HILDON = False import socket if RUNNING_HILDON: socket.setdefaulttimeout(30.0) else: socket.setdefaulttimeout(10.0) #logging.basicConfig(level=logging.DEBUG) #try: # import tempfile # logfile = tempfile.mkstemp(prefix='poller-',suffix='.log')[1] # logging.basicConfig(filename=logfile, filemode="a", level=logging.DEBUG) #except: # pass import dbus import dbus.service import dbus.mainloop.glib import gobject import ptvDB dbus.mainloop.glib.threads_init() dbus.mainloop.glib.DBusGMainLoop(set_as_default=True) class Poller(dbus.service.Object): def __init__(self, remote_app, mainloop, bus, object_path="/PtvPoller"): dbus.service.Object.__init__(self, bus, object_path) logging.debug("poller startup") self._remote_app = remote_app self._db = ptvDB.ptvDB(self._polling_cb) self._poll_trigger = False self._quitting = False self._mainloop = mainloop gobject.timeout_add(15000, self._app_ping) def _app_ping(self): try: #logging.debug("ping") if not self._remote_app.Ping(): logging.debug("Poller exit, ping was false (app exiting)") self.exit() except Exception, e: logging.debug("ping exception %s" % str(e)) self.exit() return True def _polling_cb(self, args, cancelled=False): logging.debug("Poller calling back, %s" % (str(self._quitting))) #def go(args, cancelled): try: #logging.debug("tick1") if not self._remote_app.PollingCallback(str(args), cancelled): ##logging.debug("tick2") logging.debug("Poller exit, negative callback (exiting)") self.exit() return False except Exception, e: logging.debug("Poller exit, exception in callback: %s" % str(e)) self.exit() return False #gobject.timeout_add(100, go, args, cancelled) @dbus.service.method("com.ywwg.PenguinTVPoller.PollInterface") def poll_multiple(self, arguments, feeds, finished_cb): logging.debug("Poller starting poll mult") def go(arguments, feeds): total = self._db.poll_multiple(arguments, feeds) f = getattr(self._remote_app, finished_cb) f(total) return False gobject.idle_add(go, arguments, feeds) @dbus.service.method("com.ywwg.PenguinTVPoller.PollInterface") def poll_all(self, arguments, finished_cb): logging.debug("Poller starting poll all") def go(arguments): total = self._db.poll_multiple(arguments) f = getattr(self._remote_app, finished_cb) f(total) return False gobject.idle_add(go, arguments) @dbus.service.method("com.ywwg.PenguinTVPoller.PollInterface") def is_quitting(self): #logging.debug("is quitting?") return self._quitting @dbus.service.method("com.ywwg.PenguinTVPoller.PollInterface") def exit(self): logging.debug("exiting") self._quitting = True self._db.finish(False, False) self._mainloop.quit() return False @dbus.service.method("com.ywwg.PenguinTVPoller.PollInterface") def get_pid(self): return os.getpid() @dbus.service.method("com.ywwg.PenguinTVPoller.PollInterface") def ping(self): logging.debug("responding to ping") return True if __name__ 
== '__main__': # Here starts the dynamic part of the program bus = dbus.SessionBus() dubus = bus.get_object('org.freedesktop.DBus', '/org/freedesktop/dbus') dubus_methods = dbus.Interface(dubus, 'org.freedesktop.DBus') if dubus_methods.NameHasOwner('com.ywwg.PenguinTV'): remote_object = bus.get_object("com.ywwg.PenguinTV", "/PtvApp") remote_app = dbus.Interface(remote_object, "com.ywwg.PenguinTV.AppInterface") else: logging.error("No running app found") sys.exit(1) bus = dbus.service.BusName("com.ywwg.PenguinTVPoller", bus=bus) loop = gobject.MainLoop() poller = Poller(remote_app, loop, bus) if RUNNING_HILDON: os.nice(15) logging.debug("mainloop") loop.run() logging.debug("quit") PenguinTV-4.2.0/penguintv/SimpleImageCache.py0000644000000000000000000000516711117761152016006 0ustar # Written by Owen Williams # see LICENSE for license information import logging import threading import pycurl __MAX_IMAGES__ = 250 class SimpleImageCache: """Dead simple. we keep all of the images _in_ram_ with a dictionary. OH YEAH""" def __init__(self): self.image_dict={} self.image_list=[] self._update_lock = threading.Lock() def is_cached(self, url): self._update_lock.acquire() if self.image_dict.has_key(url): self._update_lock.release() return True self._update_lock.release() return False def _check_cache(self, url): self._update_lock.acquire() if len(self.image_dict) > __MAX_IMAGES__: #flush it every so often url_to_delete = self.image_list.pop(0) self.image_dict.pop(url_to_delete) if self.image_dict.has_key(url): image = self.image_dict[url] self._update_lock.release() return image self._update_lock.release() return None def get_image_from_file(self, filename): url = "file://" + filename cache = self._check_cache(url) if cache is not None: #logging.debug("cached") return cache self._update_lock.acquire() try: f = open(filename, "rb") image = self.image_dict[url] = f.read() self.image_list.append(url) f.close() except Exception, e: logging.error("Error retrieving local file: %s" % (str(e),)) image = self.image_dict[url] = "" self.image_list.append(url) self._update_lock.release() return image def get_image(self, url): cache = self._check_cache(url) if cache is not None: return cache if url[0:4] == "file": #logging.debug("local") filename = url[7:] return self.get_image_from_file(filename) else: #logging.debug("remote") return self._get_http_image(url) def _get_http_image(self, url): d = SimpleImageCache.Downloader(url) d.download() self._update_lock.acquire() image = self.image_dict[url] = d.contents self.image_list.append(url) self._update_lock.release() return image class Downloader: def __init__(self, url): self._url = url self.contents = '' def download(self): c = pycurl.Curl() try: c.setopt(pycurl.URL, str(self._url).strip()) except Exception, e: logging.error("Error downloading file: %s %s" % (self._url,str(e))) return "" c.setopt(pycurl.WRITEFUNCTION, self.body_callback) c.setopt(pycurl.CONNECTTIMEOUT, 7) #aggressive timeouts c.setopt(pycurl.TIMEOUT, 20) #aggressive timeouts c.setopt(pycurl.FOLLOWLOCATION, 1) c.setopt(pycurl.NOSIGNAL, 1) #multithread ok try: c.perform() c.close() except: self.contents = "" def body_callback(self, buf): self.contents = self.contents + buf PenguinTV-4.2.0/penguintv/LoginDialog.py0000644000000000000000000000174610652134714015056 0ustar # Written by Owen Williams # see LICENSE for license information import gtk class LoginDialog(gtk.Dialog): def __init__(self,xml): self.xml = xml self._dialog = xml.get_widget("dialog_login") for key in dir(self.__class__): if key[:3] == 
'on_': self.xml.signal_connect(key, getattr(self,key)) self._user_widget = self.xml.get_widget("user_widget") self._pass_widget = self.xml.get_widget("pass_widget") self.username = "" self.password = "" def run(self): self._user_widget.grab_focus() return self._dialog.run() def on_dialog_rename_feed_delete_event(self, widget, event): return self._dialog.hide_on_delete() def hide(self): self.username = self._user_widget.get_text() self.password = self._pass_widget.get_text() self._pass_widget.set_text("") self._dialog.hide() def on_user_widget_activate(self): print "user_act" self._pass_widget.grab_focus() def on_pass_widget_activate(self, event): self._dialog.response(gtk.RESPONSE_OK) PenguinTV-4.2.0/penguintv/S3SyncClient.py0000644000000000000000000000604711011620454015135 0ustar import logging import threading import gettext _=gettext.gettext from SqliteSyncClient import SqliteSyncClient from amazon import S3 #BUCKET_NAME will be prepended with access key BUCKET_NAME_SUF = '-penguintv-article-sync-db' KEYNAME = 'penguintv-syncdb-1' STAMP_KEYNAME = 'penguintv-syncdb-timestamp' class S3SyncClient(SqliteSyncClient): def __init__(self): SqliteSyncClient.__init__(self) self.__transfer_lock = threading.Lock() self._username = "" self._bucket = self._username.lower() + BUCKET_NAME_SUF self._conn = None def get_parameters(self): return [ (_("Access Key"), "username", "", False), (_("Secret Key"), "password", "", True) ] def set_username(self, username): SqliteSyncClient.set_username(self, username) self._bucket = self._username.lower() + BUCKET_NAME_SUF def _do_authenticate(self): self._conn = S3.AWSAuthConnection(self._username, self._password) #the only way to "authenticate" is to list buckets. if list is #empty, try creating the bucket. success? it worked! failure? 
#bad keys buckets = [x.name for x in self._conn.list_all_my_buckets().entries] if len(buckets) > 0: if self._bucket not in buckets: #try creating our bucket response = \ self._conn.create_located_bucket(self._bucket, S3.Location.DEFAULT) if response.http_response.status == 200: return True else: return False else: return True response = \ self._conn.create_located_bucket(self._bucket, S3.Location.DEFAULT) if response.http_response.status == 200: return True else: return False def _set_server_timestamp(self, timestamp): assert self._authenticated #logging.debug("TIMESTAMPING SUBMISSION: %i" % timestamp) resp = self._conn.put(self._bucket, STAMP_KEYNAME, str(timestamp)) if resp.http_response.status != 200: logging.error("error submitting timestamp") return False return True def _get_server_timestamp(self): assert self._authenticated self.__transfer_lock.acquire() resp = self._conn.get(self._bucket, STAMP_KEYNAME) if resp.http_response.status == 404: self._conn.put(self._bucket, STAMP_KEYNAME, "0") self.__transfer_lock.release() return 0 elif resp.http_response.status != 200: logging.error("couldn't get last submit time: %s" % resp.message) self.__transfer_lock.release() return 0 self.__transfer_lock.release() return int(resp.object.data) def _db_exists(self): response = self._conn.list_bucket(self._bucket) if response.http_response.status != 200: return False for entry in response.entries: if entry.key == KEYNAME: return True return False def _do_download_db(self): self.__transfer_lock.acquire() response = self._conn.get(self._bucket, KEYNAME) self.__transfer_lock.release() if response.http_response.status != 200: return None return response.object.data def _upload_db(self, fp): self.__transfer_lock.acquire() response = self._conn.put(self._bucket, KEYNAME, fp.read()) self.__transfer_lock.release() return response.http_response.status == 200 PenguinTV-4.2.0/penguintv/EditSearchesDialog.py0000644000000000000000000001165411175671471016357 0ustar # Written by Owen Williams # see LICENSE for license information import penguintv from ptvDB import TagAlreadyExists import gtk import utils class EditSearchesDialog: def __init__(self,glade_path,app): self._xml = gtk.glade.XML(glade_path, "window_edit_search_tags",'penguintv') self._modify_xml = gtk.glade.XML(glade_path, "dialog_modify_search_tag",'penguintv') self._app = app def on_remove_button_clicked(self, event): selection = self._saved_search_list_widget.get_selection() model, iter = selection.get_selected() if iter is not None: saved_item = model[iter] i=-1 for item in model: i+=1 if item[0] == saved_item[0]: break saved_pos = i self._app.remove_search_tag(model[iter][0]) self._populate_searches() if iter is not None: if saved_pos <= len(model): selection = self._saved_search_list_widget.get_selection() selection.select_path((saved_pos,)) return def on_add_button_clicked(self, event): self._add_search() def on_tag_name_entry_activate(self, event): self._app.change_search_tag() def on_query_entry_activate(self, event): self._app.change_search_tag() def on_tag_edit_done(self, renderer, path, new_text): model = self._saved_search_list_widget.get_model() self._app.change_search_tag(model[path][0], new_tag=new_text) model[path][0] = new_text def on_query_edit_done(self, renderer, path, new_text): model = self._saved_search_list_widget.get_model() self._app.change_search_tag(model[path][0], new_query=new_text) model[path][1] = new_text def on_modify_button_clicked(self, event): selection = self._saved_search_list_widget.get_selection() model, iter = 
selection.get_selected() if iter is None: return tag_name = model[iter][0] query = model[iter][1] self._mod_tag_name_entry.set_text(tag_name) self._mod_query_entry.set_text(query) response = self._mod_dialog.run() if response == gtk.RESPONSE_OK: new_name = self._mod_tag_name_entry.get_text() new_query = self._mod_query_entry.get_text() self._app.change_search_tag(tag_name, new_name, new_query) model[iter][0] = new_name model[iter][1] = new_query self._mod_dialog.hide() def on_close_button_clicked(self,event): self.hide() def on_window_edit_search_tags_destroy_event(self,data1,data2): self.hide() def on_window_edit_search_tags_delete_event(self, data1,data2): return self._window.hide_on_delete() def hide(self): self._window.hide() def show(self): self._window = self._xml.get_widget("window_edit_search_tags") self._window.set_transient_for(self._app.main_window.get_parent()) self._mod_dialog = self._modify_xml.get_widget("dialog_modify_search_tag") self._mod_tag_name_entry = self._modify_xml.get_widget("tag_name_entry") self._mod_query_entry = self._modify_xml.get_widget("query_entry") for key in dir(self.__class__): if key[:3] == 'on_': self._xml.signal_connect(key, getattr(self,key)) self._saved_search_list_widget = self._xml.get_widget("saved_search_list") model = gtk.ListStore(str, str) #tag name, query self._saved_search_list_widget.set_model(model) renderer = gtk.CellRendererText() renderer.set_property("editable",True) renderer.connect("edited", self.on_tag_edit_done) self._tag_column = gtk.TreeViewColumn('Search Tag Name') self._tag_column.pack_start(renderer, True) self._tag_column.set_attributes(renderer, markup=0) self._saved_search_list_widget.append_column(self._tag_column) renderer = gtk.CellRendererText() renderer.set_property("editable",True) renderer.connect("edited", self.on_query_edit_done) self._query_column = gtk.TreeViewColumn('Query') self._query_column.pack_start(renderer, True) self._query_column.set_attributes(renderer, markup=1) self._saved_search_list_widget.append_column(self._query_column) self._window.resize(500,500) self._window.show() self._populate_searches() def apply_tags(self): pass def _populate_searches(self): model = self._saved_search_list_widget.get_model() model.clear() searches = self._app.db.get_search_tags() if searches: for search in searches: model.append([search[0],search[1]]) def _add_search(self): current_query=_("New Query") current_tag=_("New Tag") def try_add_tag(basename, query, i=0): try: if i>0: #self._app.db.add_search_tag(query, basename+" "+str(i)) self._app.add_search_tag(query, basename+" "+str(i)) return basename+" "+str(i) #self._app.db.add_search_tag(query, basename) self._app.add_search_tag(query, basename+" "+str(i)) return basename except TagAlreadyExists, e: return try_add_tag(basename, query, i+1) current_tag = try_add_tag(current_tag, current_query) model = self._saved_search_list_widget.get_model() model.append([current_tag,current_query]) self._saved_search_list_widget.set_cursor(len(model)-1, self._tag_column, True) PenguinTV-4.2.0/penguintv/FeedPropertiesDialog.py0000644000000000000000000002376611367432716016743 0ustar # Written by Owen Williams # see LICENSE for license information import penguintv import utils from ptvDB import FeedAlreadyExists, FF_NOAUTODOWNLOAD, FF_NOSEARCH, \ FF_NOAUTOEXPIRE, FF_NOTIFYUPDATES, FF_ADDNEWLINES, \ FF_MARKASREAD, FF_NOKEEPDELETED, FF_DOWNLOADSINGLE import gtk import time, datetime from math import floor class FeedPropertiesDialog(gtk.Dialog): def __init__(self,xml,app): 
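# Hedged aside on the reparenting performed below: the glade file builds
# these contents inside a throwaway toplevel window, so the dialog adopts
# that child into its own vbox and borrows the window's title. A minimal
# sketch of the same trick (hypothetical names):
def _adopt_glade_contents(dialog, contents):
    parent = contents.get_parent()
    contents.unparent()
    dialog.vbox.add(contents)
    dialog.set_title(parent.get_title())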
gtk.Dialog.__init__(self) self._xml = xml self._app = app #self._window = xml.get_widget("window_feed_properties") contents = xml.get_widget("feed_prop_contents") p = contents.get_parent() contents.unparent() self.vbox.add(contents) gtk.Dialog.set_title(self, p.get_title()) del p for key in dir(self.__class__): if key[:3] == 'on_': self._xml.signal_connect(key, getattr(self,key)) self.connect('delete-event', self.on_window_feed_properties_delete_event) self._title_widget = xml.get_widget('title_entry') self._rss_widget = xml.get_widget('rss_entry') self._link_widget = xml.get_widget('link_entry') self._description_widget = xml.get_widget('description_label') self._last_poll_widget = xml.get_widget('last_poll_label') self._next_poll_widget = xml.get_widget('next_poll_label') self._edit_tags_widget = xml.get_widget('edit_tags_widget') self._cur_flags = 0 self._old_title = "" self._old_rss = "" self._old_link = "" self._old_tags = [] self._old_flags = 0 self._feed_id=0 def show(self): if utils.RUNNING_HILDON: self.resize(600,200) self.show_all() self.set_transient_for(self._app.main_window.get_parent()) self._xml.get_widget('notebook1').set_current_page(0) if not utils.HAS_SEARCH: self._xml.get_widget('b_search').hide() if utils.RUNNING_SUGAR or not utils.HAS_STATUS_ICON: self._xml.get_widget('b_notifyupdates').hide() #if not utils.USE_TAGGING # self._edit_tags_widget.hide() self._title_widget.grab_focus() gtk.Dialog.show(self) def set_feedid(self, id): self._feed_id = id def set_title(self, title): if title is None: title="" self._title_widget.set_text(title) self._old_title = title def set_rss(self, rss): if rss is None: rss="" self._rss_widget.set_text(rss) self._old_rss = rss def set_description(self, desc): if desc is None: desc = "" self._description_widget.set_text(desc) def set_link(self, link): if link is None: link = "" self._link_widget.set_text(link) self._old_link = link def set_last_poll(self, lastpoll): self._last_poll_widget.set_text(time.strftime("%X",time.localtime(lastpoll))) def set_next_poll(self, nextpoll): if nextpoll <= time.time(): self._next_poll_widget.set_text(_("Momentarily")) else: delta = datetime.timedelta(seconds=nextpoll-time.time()) d = {'hours':int(floor(delta.seconds/3600)), 'mins':int((delta.seconds-(floor(delta.seconds/3600)*3600))/60)} self._next_poll_widget.set_text(_("in approx %(hours)sh %(mins)sm") % d) def set_tags(self, tags): text = "" if tags: for tag in tags: text=text+tag+", " text = text[0:-2] self._edit_tags_widget.set_text(text) self._old_tags = tags def set_flags(self, flags): self._old_flags = self._cur_flags = flags #reversed if flags & FF_NOAUTODOWNLOAD == FF_NOAUTODOWNLOAD: self._xml.get_widget('b_autodownload').set_active(False) else: self._xml.get_widget('b_autodownload').set_active(True) #reversed if flags & FF_NOSEARCH == FF_NOSEARCH: self._xml.get_widget('b_search').set_active(False) else: self._xml.get_widget('b_search').set_active(True) if flags & FF_NOAUTOEXPIRE == FF_NOAUTOEXPIRE: self._xml.get_widget('b_noautoexpire').set_active(True) else: self._xml.get_widget('b_noautoexpire').set_active(False) if flags & FF_NOTIFYUPDATES == FF_NOTIFYUPDATES: self._xml.get_widget('b_notifyupdates').set_active(True) else: self._xml.get_widget('b_notifyupdates').set_active(False) if flags & FF_ADDNEWLINES == FF_ADDNEWLINES: self._xml.get_widget('b_addnewlines').set_active(True) else: self._xml.get_widget('b_addnewlines').set_active(False) if flags & FF_MARKASREAD == FF_MARKASREAD: self._xml.get_widget('b_markasread').set_active(True) else: 
self._xml.get_widget('b_markasread').set_active(False) if flags & FF_NOKEEPDELETED == FF_NOKEEPDELETED: self._xml.get_widget('b_nokeepdeleted').set_active(True) else: self._xml.get_widget('b_nokeepdeleted').set_active(False) if flags & FF_DOWNLOADSINGLE == FF_DOWNLOADSINGLE: self._xml.get_widget('b_downloadsingle').set_active(True) else: self._xml.get_widget('b_downloadsingle').set_active(False) def on_window_feed_properties_delete_event(self, widget, event): return self.hide_on_delete() def on_b_autodownload_toggled(self, b_autodownload): # reverse the polarity! noautodownload = not b_autodownload.get_active() if noautodownload: if not self._cur_flags & FF_NOAUTODOWNLOAD == FF_NOAUTODOWNLOAD: self._cur_flags += FF_NOAUTODOWNLOAD self._app.db.set_flags_for_feed(self._feed_id, self._cur_flags) else: if self._cur_flags & FF_NOAUTODOWNLOAD == FF_NOAUTODOWNLOAD: self._cur_flags -= FF_NOAUTODOWNLOAD self._app.db.set_flags_for_feed(self._feed_id, self._cur_flags) def on_b_search_toggled(self, b_search): # reverse the polarity! nosearch = not b_search.get_active() if nosearch: if not self._cur_flags & FF_NOSEARCH == FF_NOSEARCH: self._cur_flags += FF_NOSEARCH self._app.db.set_flags_for_feed(self._feed_id, self._cur_flags) else: if self._cur_flags & FF_NOSEARCH == FF_NOSEARCH: self._cur_flags -= FF_NOSEARCH self._app.db.set_flags_for_feed(self._feed_id, self._cur_flags) def on_b_notifyupdates_toggled(self, b_notifyupdates): if b_notifyupdates.get_active(): if not self._cur_flags & FF_NOTIFYUPDATES == FF_NOTIFYUPDATES: self._cur_flags += FF_NOTIFYUPDATES self._app.db.set_flags_for_feed(self._feed_id, self._cur_flags) self._app.emit('notify-tags-changed') else: if self._cur_flags & FF_NOTIFYUPDATES == FF_NOTIFYUPDATES: self._cur_flags -= FF_NOTIFYUPDATES self._app.db.set_flags_for_feed(self._feed_id, self._cur_flags) self._app.emit('notify-tags-changed') def on_b_noautoexpire_toggled(self, b_noautoexpire): if b_noautoexpire.get_active(): if not self._cur_flags & FF_NOAUTOEXPIRE == FF_NOAUTOEXPIRE: self._cur_flags += FF_NOAUTOEXPIRE self._app.db.set_flags_for_feed(self._feed_id, self._cur_flags) else: if self._cur_flags & FF_NOAUTOEXPIRE == FF_NOAUTOEXPIRE: self._cur_flags -= FF_NOAUTOEXPIRE self._app.db.set_flags_for_feed(self._feed_id, self._cur_flags) def on_b_nokeepdeleted_toggled(self, b_nokeepdeleted): if b_nokeepdeleted.get_active(): if not self._cur_flags & FF_NOKEEPDELETED == FF_NOKEEPDELETED: self._cur_flags += FF_NOKEEPDELETED self._app.db.set_flags_for_feed(self._feed_id, self._cur_flags) else: if self._cur_flags & FF_NOKEEPDELETED == FF_NOKEEPDELETED: self._cur_flags -= FF_NOKEEPDELETED self._app.db.set_flags_for_feed(self._feed_id, self._cur_flags) def on_b_addnewlines_toggled(self, b_addnewlines): if b_addnewlines.get_active(): if not self._cur_flags & FF_ADDNEWLINES == FF_ADDNEWLINES: self._cur_flags += FF_ADDNEWLINES self._app.db.set_flags_for_feed(self._feed_id, self._cur_flags) self._app.emit('render-ops-updated') else: if self._cur_flags & FF_ADDNEWLINES == FF_ADDNEWLINES: self._cur_flags -= FF_ADDNEWLINES self._app.db.set_flags_for_feed(self._feed_id, self._cur_flags) self._app.emit('render-ops-updated') def on_b_markasread_toggled(self, b_markasread): if b_markasread.get_active(): if not self._cur_flags & FF_MARKASREAD == FF_MARKASREAD: self._cur_flags += FF_MARKASREAD self._app.db.set_flags_for_feed(self._feed_id, self._cur_flags) self._app.emit('render-ops-updated') else: if self._cur_flags & FF_MARKASREAD == FF_MARKASREAD: self._cur_flags -= FF_MARKASREAD 
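# Hedged aside: the guarded += / -= pattern used throughout these toggle
# handlers is the arithmetic spelling of a bitwise set/clear. An equivalent
# formulation (hypothetical helper, reusing the module's FF_* constants):
def _with_flag(flags, flag, enabled):
    return (flags | flag) if enabled else (flags & ~flag)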
self._app.db.set_flags_for_feed(self._feed_id, self._cur_flags) self._app.emit('render-ops-updated') def on_b_downloadsingle_toggled(self, b_downloadsingle): if b_downloadsingle.get_active(): if not self._cur_flags & FF_DOWNLOADSINGLE == FF_DOWNLOADSINGLE: self._cur_flags += FF_DOWNLOADSINGLE self._app.db.set_flags_for_feed(self._feed_id, self._cur_flags) else: if self._cur_flags & FF_DOWNLOADSINGLE == FF_DOWNLOADSINGLE: self._cur_flags -= FF_DOWNLOADSINGLE self._app.db.set_flags_for_feed(self._feed_id, self._cur_flags) def on_save_values_activate(self, event): new_title = self._title_widget.get_text() if new_title != self._old_title: #self._app.db.set_feed_name(self._feed_id,new_title) self._app.rename_feed(self._feed_id, new_title) self._old_title = new_title new_rss = self._rss_widget.get_text() if new_rss != self._old_rss: try: self._app.db.set_feed_url(self._feed_id, new_rss) self._old_rss = new_rss except FeedAlreadyExists: dialog = gtk.Dialog(title=_("URL Already in Use"), parent=None, flags=gtk.DIALOG_MODAL, buttons=(gtk.STOCK_OK, gtk.RESPONSE_ACCEPT)) label = gtk.Label(_("A feed already exists with that URL. Please use a different URL.")) dialog.vbox.pack_start(label, True, True, 0) label.show() response = dialog.run() dialog.hide() del dialog self._rss_widget.grab_focus() return False new_link = self._link_widget.get_text() if new_link != self._old_link: self._app.db.set_feed_link(self._feed_id, new_link) self._old_link = new_link tags=[tag.strip() for tag in self._edit_tags_widget.get_text().split(',')] self._app.apply_tags_to_feed(self._feed_id, self._old_tags, tags) self._app.db.set_flags_for_feed(self._feed_id, self._cur_flags) return True def on_close_button_clicked(self,event): self._finish() def on_revert_button_clicked(self, event): self.set_title(self._old_title) self.set_rss(self._old_rss) self.set_link(self._old_link) self.set_flags(self._old_flags) def _finish(self): if self.on_save_values_activate(None): self.hide() PenguinTV-4.2.0/penguintv/GStreamerPlayer.py0000755000000000000000000010537511401461135015733 0ustar #!/usr/bin/env python #a basic gstreamer-based player. 
Can run standalone or inside the widget of your choice #much help from the mesk player code, totem, and most of all, google.com/codesearch #Copyright 2006, Owen Williams #License: GPL import sys,os,os.path import pickle import urllib import getopt from math import ceil, floor import time import pygst pygst.require("0.10") import gst from gst.extend.discoverer import Discoverer import pygtk pygtk.require("2.0") import gtk import gobject import locale import gettext import logging logging.basicConfig(level=logging.DEBUG) #locale.setlocale(locale.LC_ALL, '') _=gettext.gettext if os.environ.has_key('SUGAR_PENGUINTV'): RUNNING_SUGAR = True else: RUNNING_SUGAR = False try: import hildon RUNNING_HILDON = True except: RUNNING_HILDON = False class GStreamerPlayer(gobject.GObject): __gsignals__ = { 'playing': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([])), 'paused': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([])), 'tick': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([])), 'item-queued': (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, [str, str, gobject.TYPE_PYOBJECT, gobject.TYPE_PYOBJECT]), 'item-not-supported': (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, [str, str, gobject.TYPE_PYOBJECT]), 'items-removed': (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, []) } def __init__(self, layout_dock, playlist, tick_interval=1): gobject.GObject.__init__(self) self._layout_dock = layout_dock self._playlist_name = playlist self._media_duration = 0 self._media_position = 0 self._last_tick = 0 self._tick_interval = tick_interval * gst.SECOND self._last_index = -1 self._current_index = 0 #index to tree model self._resized_pane = False self.__no_seek = False self.__is_exposed = False self._x_overlay = None self._prepare_save = False self._do_stop_resume = False self._has_video = False #self._using_playbin2 = True #TODO: wait until playbin2 isn't buggy, then reenable self._using_playbin2 = False self._error_dialog = GStreamerErrorDialog() gobject.timeout_add(300000, self._periodic_save_cb) ###public functions### def Show(self): main_vbox = gtk.VBox() vbox = gtk.VBox() self._hpaned = gtk.HPaned() self._player_vbox = gtk.VBox() self._drawing_area = gtk.DrawingArea() color = gtk.gdk.Color(0, 0, 0) self._drawing_area.modify_bg(gtk.STATE_NORMAL, color) self._drawing_area.connect('expose-event', self._on_drawing_area_exposed) self._player_vbox.pack_start(self._drawing_area) vbox.pack_start(self._player_vbox, True) self._seek_scale = gtk.HScale() self._seek_scale.set_range(0, 1) self._seek_scale.set_draw_value(False) self._seek_scale.connect('value-changed', self._on_seek_value_changed) vbox.pack_start(self._seek_scale, False) self._hpaned.add1(vbox) self._sidepane_vbox = gtk.VBox() s_w = gtk.ScrolledWindow() s_w.set_shadow_type(gtk.SHADOW_IN) s_w.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC) if RUNNING_HILDON: hildon.hildon_helper_set_thumb_scrollbar(s_w, True) self._queue_listview = gtk.TreeView() model = gtk.ListStore(str, str, str, gobject.TYPE_PYOBJECT, gobject.TYPE_PYOBJECT) #uri, title to display, current track indicator, current pos, user data self._queue_listview.set_model(model) column = gtk.TreeViewColumn(_("")) renderer = gtk.CellRendererText() column.pack_start(renderer, True) column.set_attributes(renderer, markup=2) self._queue_listview.append_column(column) column = gtk.TreeViewColumn(_("Playlist")) renderer = gtk.CellRendererText() column.pack_start(renderer, True) column.set_attributes(renderer, markup=1) self._queue_listview.append_column(column) 
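# Hedged illustration of the five-column row schema built above -- uri,
# display markup, current-track bullet, saved position, and opaque user
# data (the filename and values below are hypothetical):
def _example_playlist_row():
    return ['file:///tmp/example.mp3', 'Example Episode', '', 0, None]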
self._queue_listview.connect('row-activated', self._on_queue_row_activated) self._queue_listview.connect('button-press-event', self._on_queue_row_button_press) self._queue_listview.get_selection().set_mode(gtk.SELECTION_MULTIPLE) #dnd reorder self._TARGET_TYPE_REORDER = 80 self._TARGET_TYPE_URI_LIST = 81 drop_types = [('reorder',gtk.TARGET_SAME_WIDGET, self._TARGET_TYPE_REORDER), ('text/uri-list',0,self._TARGET_TYPE_URI_LIST)] #for removing items from favorites and reordering self._queue_listview.enable_model_drag_source(gtk.gdk.BUTTON1_MASK, drop_types, gtk.gdk.ACTION_MOVE) self._queue_listview.enable_model_drag_dest(drop_types, gtk.gdk.ACTION_DEFAULT) self._queue_listview.connect('drag-data-received', self._on_queue_drag_data_received) s_w.add(self._queue_listview) self._sidepane_vbox.pack_start(s_w, True) button = gtk.Button(stock='gtk-remove') button.connect("clicked", self._on_remove_clicked) if not RUNNING_HILDON: button_box = gtk.HButtonBox() button_box.set_property('layout-style', gtk.BUTTONBOX_END) button_box.add(button) self._sidepane_vbox.pack_start(button_box, False) else: list_bottom_hbox = gtk.HBox(False) list_bottom_hbox.pack_start(button, False) self._sidepane_vbox.pack_start(list_bottom_hbox, False) self._hpaned.add2(self._sidepane_vbox) main_vbox.add(self._hpaned) self._controls_hbox = gtk.HBox() self._controls_hbox.set_spacing(6) button_box = gtk.HButtonBox() button_box.set_homogeneous(False) button_box.set_property('layout-style', gtk.BUTTONBOX_START) if not RUNNING_HILDON: image = gtk.Image() image.set_from_stock("gtk-media-previous",gtk.ICON_SIZE_BUTTON) button = gtk.Button() button.set_image(image) else: button = gtk.Button() label = gtk.Label(_("Prev")) label.set_use_markup(True) button.add(label) label.show() button.set_property('can-focus', False) button.connect("clicked", self._on_prev_clicked) button_box.pack_start(button, True, True) if not RUNNING_HILDON: image = gtk.Image() image.set_from_stock("gtk-media-rewind",gtk.ICON_SIZE_BUTTON) button = gtk.Button() button.set_image(image) else: button = gtk.Button(_("Rew")) button.set_property('can-focus', False) button.connect("clicked", self._on_rewind_clicked) button_box.pack_start(button, True, True) if not RUNNING_HILDON: image = gtk.Image() image.set_from_stock("gtk-media-play",gtk.ICON_SIZE_BUTTON) self._play_pause_button = gtk.Button() self._play_pause_button.set_image(image) else: self._play_pause_button = gtk.Button(_("Play")) self._play_pause_button.set_property('can-focus', False) self._play_pause_button.connect("clicked", self._on_play_pause_toggle_clicked) button_box.pack_start(self._play_pause_button, True, True) if not RUNNING_HILDON: image = gtk.Image() image.set_from_stock("gtk-media-stop",gtk.ICON_SIZE_BUTTON) button = gtk.Button() button.set_image(image) else: button = gtk.Button(_("Stop")) button.set_property('can-focus', False) button.connect("clicked", self._on_stop_clicked) button_box.pack_start(button, True, True) if not RUNNING_HILDON: image = gtk.Image() image.set_from_stock("gtk-media-forward",gtk.ICON_SIZE_BUTTON) button = gtk.Button() button.set_image(image) else: button = gtk.Button(_("FF")) button.set_property('can-focus', False) button.connect("clicked", self._on_forward_clicked) button_box.pack_start(button, True, True) if not RUNNING_HILDON: image = gtk.Image() image.set_from_stock("gtk-media-next",gtk.ICON_SIZE_BUTTON) button = gtk.Button() button.set_image(image) else: button = gtk.Button(_("Next")) button.set_property('can-focus', False) button.connect("clicked", 
self._on_next_clicked) button_box.pack_start(button, True, True) self._controls_hbox.pack_start(button_box, False) self._time_label = gtk.Label("") self._time_label.set_alignment(0.0,0.5) if not RUNNING_HILDON: self._controls_hbox.pack_start(self._time_label, True) else: list_bottom_hbox.pack_start(self._time_label, True) list_bottom_hbox.reorder_child(self._time_label, 0) main_vbox.pack_start(self._controls_hbox, False) self._layout_dock.add(main_vbox) self.gstreamer_init() self._layout_dock.show_all() def gstreamer_init(self): if self._using_playbin2: try: self._pipeline = gst.element_factory_make("playbin2", "ptv_bin") except: self._using_playbin2 = False self._pipeline = gst.element_factory_make("playbin", "ptv_bin") else: self._pipeline = gst.element_factory_make("playbin", "ptv_bin") #use default audio sink, but get our own video sink self._v_sink = self._get_video_sink() self._pipeline.set_property('video-sink',self._v_sink) #if RUNNING_HILDON: # self._mp3_sink = gst.element_factory_make('dspmp3sink', 'mp3sink') # self._pcm_sink = gst.element_factory_make('dsppcmsink', 'pcmsink') bus = self._pipeline.get_bus() bus.add_signal_watch() bus.connect('message', self._on_gst_message) #bus.connect('sync-message::element', self._on_sync_message) def get_widget(self): return self._layout_dock def is_exposed(self): return self.__is_exposed def has_video(self): return self._has_video def detach(self): """video window can detach. queue stays embedded""" pass def reattach(self): """hides external window and reinits embedded window""" pass def toggle_controls(self, show_controls): if not show_controls: self._controls_hbox.show() self._seek_scale.show() self._sidepane_vbox.show() else: self._controls_hbox.hide() self._seek_scale.hide() self._sidepane_vbox.hide() def load(self): try: playlist = open(self._playlist_name, 'r') except: print "error reading playlist" return try: self._current_index = pickle.load(playlist) self._last_index = -1 self._media_position = pickle.load(playlist) self._media_duration = pickle.load(playlist) l = pickle.load(playlist) model = self._queue_listview.get_model() for uri, name, pos, userdata in l: model.append([uri, name, "", pos, userdata]) filename = gst.uri_get_location(uri) self.emit('item-queued', filename, name, pos, userdata) if self.__is_exposed: if not self._seek_in_ready(self._media_position): #retry once self._seek_in_ready(self._media_position) except ValueError, e: logging.warning("Playlist has incorrect format, unable to load") playlist.close() def save(self): """saves playlist""" try: playlist = open(self._playlist_name, 'w') except Exception, e: print "error writing playlist: %s" % str(e) return pickle.dump(self._current_index, playlist) pickle.dump(self._media_position, playlist) pickle.dump(self._media_duration, playlist) l = [] for uri, name, current, pos, userdata in self._queue_listview.get_model(): l.append([uri, name, pos, userdata]) pickle.dump(l, playlist) playlist.close() def queue_file(self, filename, name=None, pos=0, userdata=None): try: os.stat(filename) except: print "file not found",filename return if name is None: name = os.path.split(filename)[1] if RUNNING_HILDON: ext = os.path.splitext(filename)[1][1:] known_good = ['mp3', 'wav', 'm4a', 'wma', 'mpg', 'avi', '3gp', 'rm', 'asf', 'mp4'] try: gst.element_factory_make("oggdemux", "test") known_good += ['ogg'] except: pass self._on_type_discovered(None, ext in known_good, filename, name, pos, userdata) else: #thanks gstfile.py d = Discoverer(filename) d.connect('discovered', 
self._on_type_discovered, filename, name, pos, userdata) d.discover() def unqueue(self, filename=None, userdata=None): model = self._queue_listview.get_model() iter_list = [] if filename is not None: logging.warning("UNTESTED CODE:") it = model.get_iter_first() while it is not None: data = model.get(it, 0)[0] logging.debug("%s %s" % (str(filename), str(data))) if data == "file://" + filename: iter_list.append(it) it = model.iter_next(it) if len(iter_list) > 0: self._remove_items(iter_list) if userdata is not None: it = model.get_iter_first() while it is not None: data = model.get(it, 3)[0] if data == userdata: iter_list.append(it) it = model.iter_next(it) if len(iter_list) > 0: self._remove_items(iter_list) def relocate_media(self, old_dir, new_dir): if old_dir[-1] == '/' or old_dir[-1] == '\\': old_dir = old_dir[:-1] if new_dir[-1] == '/' or new_dir[-1] == '\\': new_dir = new_dir[:-1] model = self._queue_listview.get_model() if len(model) == 0: return self.stop() for row in model: if row[0].startswith("file://" + old_dir): row[0] = row[0].replace(old_dir, new_dir) def get_queue_count(self): return len(self._queue_listview.get_model()) def get_queue(self): return list(self._queue_listview.get_model()) def play_pause_toggle(self): if self._pipeline.get_state()[1] == gst.STATE_PLAYING: self.pause() else: self.play() def play(self, notick=False, doseek=False): model = self._queue_listview.get_model() if len(model) == 0: return if self._current_index < 0: self._current_index = 0 uri, title, current, pos, userdata = list(model[self._current_index]) if self._last_index != self._current_index: self._last_index = self._current_index selection = self._queue_listview.get_selection() i = -1 for row in model: i+=1 if i == self._current_index: row[2] = "•" #bullet else: row[2] = "" if not self._ready_new_uri(uri): return self._prepare_display() self._prepare_save = True if doseek: self._seek_in_ready(pos) else: if self._do_stop_resume: self._do_stop_resume = False self._prepare_display() if not self._seek_in_ready(self._media_position): #gstreamer error, recall ourselves self.play() return self._pipeline.set_state(gst.STATE_PLAYING) if not RUNNING_HILDON: image = gtk.Image() image.set_from_stock("gtk-media-pause",gtk.ICON_SIZE_BUTTON) self._play_pause_button.set_image(image) else: self._play_pause_button.set_label(_("Pause")) self._media_duration = -1 if not notick: gobject.timeout_add(500, self._tick) #self._pipeline.get_property('stream-info') self.emit('playing') def pause(self): try: self._media_position = self._pipeline.query_position(gst.FORMAT_TIME)[0] except: pass self._pipeline.set_state(gst.STATE_PAUSED) if not RUNNING_HILDON: image = gtk.Image() image.set_from_stock("gtk-media-play",gtk.ICON_SIZE_BUTTON) self._play_pause_button.set_image(image) else: self._play_pause_button.set_label(_("Play")) self.emit('paused') def stop(self): #this should release the port, but I hate having a stop button on a computer #because it doesn't make sense. 
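# (Hedged editorial aside: "stop" below is really "pause at READY" -- the
# current position is saved and the pipeline drops to gst.STATE_READY so a
# later play() can resume, whereas gst.STATE_NULL would tear everything
# down; that is why only the xvimagesink branch pushes the sink to NULL, to
# release the Xv port.)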
#unbreak: when video is stopped, we need this option to keep the window black self._drawing_area.set_flags(gtk.DOUBLE_BUFFERED) try: self._media_position = self._pipeline.query_position(gst.FORMAT_TIME)[0] except: pass self._pipeline.set_state(gst.STATE_READY) #self._last_index = -1 self._seek_scale.set_range(0,1) self._seek_scale.set_value(0) self._do_stop_resume = True if not RUNNING_HILDON: image = gtk.Image() image.set_from_stock("gtk-media-play",gtk.ICON_SIZE_BUTTON) self._play_pause_button.set_image(image) else: self._play_pause_button.set_label(_("Play")) if 'gstxvimagesink' in str(type(self._v_sink)).lower(): #release the xv port self._pipeline.unlink(self._v_sink) self._v_sink.set_state(gst.STATE_NULL) self.emit('paused') def ff(self): if self._pipeline.get_state()[1] != gst.STATE_PLAYING: return new_pos = self._media_position + 15 * gst.SECOND if new_pos > self._media_duration: new_pos = self._media_duration self.seek(new_pos) def rew(self): if self._pipeline.get_state()[1] != gst.STATE_PLAYING: return new_pos = self._media_position - 5 * gst.SECOND if new_pos < 0: new_pos = 0 self.seek(new_pos) def next(self): model = self._queue_listview.get_model() selection = self._queue_listview.get_selection() self._pipeline.set_state(gst.STATE_READY) self._current_index += 1 if self._current_index >= len(model): self._current_index = 0 selection.unselect_all() selection.select_path((self._current_index,)) self._seek_scale.set_range(0,1) self._seek_scale.set_value(0) self._do_stop_resume = False self.play(doseek=True) def prev(self): if (self._media_position > 5 * gst.SECOND): self.seek(0) else: selection = self._queue_listview.get_selection() self._pipeline.set_state(gst.STATE_READY) self._current_index -= 1 if self._current_index <= 0: self._current_index = 0 self.seek(0) selection.unselect_all() selection.select_path((self._current_index,)) self._seek_scale.set_range(0,1) self._seek_scale.set_value(0) self._do_stop_resume = False self.play(doseek=True) def finish(self): self.save() self._pipeline.set_state(gst.STATE_READY) def seek(self, time): return self._pipeline.seek(1.0, gst.FORMAT_TIME, gst.SEEK_FLAG_FLUSH | gst.SEEK_FLAG_ACCURATE, gst.SEEK_TYPE_SET, time, gst.SEEK_TYPE_NONE, 0) def vol_up(self): old_vol = floor(self._pipeline.get_property('volume')) if old_vol < 10: self._pipeline.set_property('volume', old_vol + 1) def vol_down(self): old_vol = ceil(self._pipeline.get_property('volume')) if old_vol > 0: self._pipeline.set_property('volume', old_vol - 1) ###handlers### def _on_gst_message(self, bus, message): #print str(message) if message.type == gst.MESSAGE_STATE_CHANGED: prev, new, pending = message.parse_state_changed() if new == gst.STATE_PLAYING: if not self._resized_pane: self._resize_pane() if message.type == gst.MESSAGE_EOS: model = self._queue_listview.get_model() if self._current_index < len(model) - 1: self._update_position(0) self.next() else: self._update_position(0) self.stop() elif message.type == gst.MESSAGE_ERROR: gerror, debug = message.parse_error() logging.error("GSTREAMER ERROR: %s" % debug) if not RUNNING_HILDON: self._error_dialog.show_error(debug) def _on_play_clicked(self, b): self.play() def _on_pause_clicked(self, b): self.pause() def _on_play_pause_toggle_clicked(self, b): self.play_pause_toggle() def _on_stop_clicked(self, b): self.stop() def _on_rewind_clicked(self, b): self.rew() def _on_forward_clicked(self, b): self.ff() def _on_next_clicked(self, b): self.next() def _on_prev_clicked(self, b): self.prev() def _on_type_discovered(self, discoverer, 
ismedia, filename, name, pos, userdata): if ismedia: model = self._queue_listview.get_model() uri = 'file://'+urllib.quote(str(filename)) if RUNNING_SUGAR or RUNNING_HILDON: name = ''+name+'' model.append([uri, name, "", pos, userdata]) self.emit('item-queued', filename, name, pos, userdata) self.save() else: self.emit('item-not-supported', filename, name, userdata) def _on_remove_clicked(self, b): model, paths = self._queue_listview.get_selection().get_selected_rows() self._remove_items([model.get_iter(path) for path in paths]) def _remove_items(self, iter_list): model = self._queue_listview.get_model() current_uri = model[self._current_index][0] for i in iter_list: if model.get_path(i)[0] == self._current_index: self.stop() model.remove(i) if len(model) == 0: self._last_index = -1 self._current_index = 0 self._media_position = 0 self._media_duration = 0 self._update_time_label() else: try: self._current_index = [r[0] for r in model].index(current_uri) self._last_index = self._current_index except ValueError: # If the current_uri was removed, reset to top of list self._current_index = 0 self._last_index = -1 self._media_position = 0 self._media_duration = 0 self._update_time_label() self.emit('items-removed') def _on_seek_value_changed(self, widget): if self.__no_seek: return pos = widget.get_value() self.seek(pos) def _on_queue_row_activated(self, treeview, path, view_column): self.pause() self._last_index = -1 self._current_index = path[0] self.play(doseek=True) def _on_queue_row_button_press(self, widget, event): if event.button==3: #right click menu = gtk.Menu() path = widget.get_path_at_pos(int(event.x),int(event.y)) model = widget.get_model() if path is None: #nothing selected return item = gtk.ImageMenuItem(_("_Remove")) img = gtk.image_new_from_stock('gtk-remove',gtk.ICON_SIZE_MENU) item.set_image(img) item.connect('activate',self._on_remove_clicked) menu.append(item) menu.show_all() menu.popup(None,None,None, event.button,event.time) def _on_key_press_event(self, widget, event): keyname = gtk.gdk.keyval_name(event.keyval) self.handle_key(keyname) def handle_key(self, keyname): #if keyname == 'f': # self.toggle_fullscreen() if keyname == 'n': self.next() elif keyname == 'b': self.prev() elif keyname == 'space' or keyname == 'p': self.play_pause_toggle() #FIXME: these don't work when we're embedded in penguintv. why? 
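# (Hedged editorial note on the FIXME above: when embedded in PenguinTV the
# arrow keys plausibly never reach this handler because surrounding widgets
# consume them for focus navigation first; a connect_after() handler or an
# accelerator group on the toplevel window would be one way to test that
# theory.)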
elif keyname == 'Right': self.ff() elif keyname == 'Left': self.rew() else: return False return True def _on_drawing_area_exposed(self, widget, event): if self._x_overlay is None: self._x_overlay = self._pipeline.get_by_interface(gst.interfaces.XOverlay) self._v_sink.expose() if not self.__is_exposed: model = self._queue_listview.get_model() if len(model) > 0: #self._prepare_display() if not self._seek_in_ready(self._media_position): #retry once self._seek_in_ready(self._media_position) self.__is_exposed = True ###utility functions### def _get_video_sink(self, compat=False): if RUNNING_HILDON: if compat: return none try: v_sink = self._pipeline.get_by_name('videosink') if v_sink is None: v_sink = gst.element_factory_make("xvimagesink", "v_sink") bus = self._pipeline.get_bus() v_sink.set_bus(bus) return v_sink else: return v_sink except: logging.error("didn't get videosink :(") return None if compat: sinks = ["ximagesink"] else: sinks = ["xvimagesink","ximagesink"] for sink_str in sinks: try: v_sink = gst.element_factory_make(sink_str, "v_sink") break except: print "couldn't init ",sink_str #according to totem this helps set things up (bacon-video-widget-gst-0.10:4290) bus = self._pipeline.get_bus() v_sink.set_bus(bus) return v_sink def _ready_new_uri(self, uri): """load a new uri into the pipeline and prepare the pipeline for playing""" #if RUNNING_HILDON: # ext = os.path.splitext(uri)[1] # #if ext == '.mp3': # # logging.debug("readying for mp3") # # self._pipeline.set_property('audio-sink', self._mp3_sink) # #else: # # logging.debug("readying for pcm") # # self._pipeline.set_property('audio-sink', self._pcm_sink) self._pipeline.set_state(gst.STATE_READY) self._pipeline.set_property('uri',uri) self._x_overlay = None #reset so we grab again when we start playing return True def _prepare_display(self, compat=False): #if type(self._v_sink) != GstXVImageSink and not compat: #do this right at some point: if we are using a substandard sink #and we're not being specifically told to use it, try the better one if 'gstximagesink' in str(type(self._v_sink)).lower() and not compat: self._v_sink = self._get_video_sink() self._pipeline.set_property('video-sink',self._v_sink) if compat: self._v_sink = self._get_video_sink(True) self._pipeline.set_property('video-sink',self._v_sink) self._v_sink.set_state(gst.STATE_READY) change_return, state, pending = self._v_sink.get_state(gst.SECOND * 10) if change_return != gst.STATE_CHANGE_SUCCESS: if 'gstximagesink' in str(type(self._v_sink)).lower(): print "couldn't find a valid video sink (do something!)" return #well that didn't work, try again with compatibility sink self._v_sink = self._get_video_sink(True) self._pipeline.set_property('video-sink',self._v_sink) self._prepare_display(True) return #maemo throws X Window System errors when doing this -- ignore them #http://labs.morpheuz.eng.br/blog/14/08/2007/xv-and-mplayer-on-maemo/ if RUNNING_HILDON: gtk.gdk.error_trap_push() self._v_sink.set_xwindow_id(self._drawing_area.window.xid) #causes expose problems #self._v_sink.set_property('sync', True) self._v_sink.set_property('force-aspect-ratio', True) self._resized_pane = False if RUNNING_HILDON: def pop_trap(): gtk.gdk.flush() gtk.gdk.error_trap_pop() return False gobject.idle_add(pop_trap) def _resize_pane(self): #get video width and height so we can resize the pane #see totem #if (!(caps = gst_pad_get_negotiated_caps (pad))) #unbreakme: if there's no video, it doesn't draw right here either self._drawing_area.set_flags(gtk.DOUBLE_BUFFERED) min_width = 200 
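# Hedged sketch of the clamping rule applied below (pure function,
# hypothetical name): the pane is positioned at the video's natural width
# for the current display height, clamped into [min_width, max_width]:
def _clamped_pane_pos(natural_width, min_width, max_width):
    return max(min_width, min(max_width, int(natural_width)))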
max_width = self._hpaned.get_allocation().width - 200 #-200 for the list box pad = self._v_sink.get_pad('sink') if pad is None: logging.debug("didn't get video sink pad") return self._resized_pane = True caps = pad.get_negotiated_caps() if caps is None: #no big deal, this might be audio only self._hpaned.set_position(max_width / 2) self._has_video = False return #maemo throws X Window System errors when doing this -- ignore them #http://labs.morpheuz.eng.br/blog/14/08/2007/xv-and-mplayer-on-maemo/ if RUNNING_HILDON: gtk.gdk.error_trap_push() self._has_video = True #unbreakme: without this option the video doesn't redraw correctly when exposed self._drawing_area.unset_flags(gtk.DOUBLE_BUFFERED) s = caps[0] movie_aspect = float(s['width']) / s['height'] display_height = self._drawing_area.get_allocation().height new_display_width = float(display_height)*movie_aspect if new_display_width >= max_width: self._hpaned.set_position(max_width) elif new_display_width <= min_width: self._hpaned.set_position(min_width) else: self._hpaned.set_position(int(new_display_width)) if RUNNING_HILDON: def pop_trap(): gtk.gdk.flush() gtk.gdk.error_trap_pop() return False gobject.idle_add(pop_trap) def _seek_in_ready(self, new_pos): """many sources don't support seek in ready, so we do it the old fashioned way: play, wait for it to play, pause, wait for it to pause, and then seek""" model = self._queue_listview.get_model() i = -1 for row in model: i+=1 if i == self._current_index: row[2] = "•" #bullet else: row[2] = "" #save, because it may get overwritten when we play and pause dur = self._media_duration if not RUNNING_HILDON: volume = self._pipeline.get_property('volume') #temporarily mute to avoid little bloops during this hack self._pipeline.set_property('volume',0) #maemo throws X Window System errors when doing this -- ignore them #http://labs.morpheuz.eng.br/blog/14/08/2007/xv-and-mplayer-on-maemo/ if RUNNING_HILDON: gtk.gdk.error_trap_push() self.play(True) change_return, state, pending = self._pipeline.get_state(gst.SECOND * 10) if change_return != gst.STATE_CHANGE_SUCCESS: print "some problem changing state to play (bad uri? 
playbin2?)" self._using_playbin2 = False self.stop() self._pipeline.set_state(gst.STATE_NULL) self.gstreamer_init() self._last_index = -1 #trigger play() to reinit the pipe return False dur = self._pipeline.query_duration(gst.FORMAT_TIME)[0] self.pause() if change_return != gst.STATE_CHANGE_SUCCESS: print "some problem changing state to pause" self._using_playbin2 = False self._pipeline.set_state(gst.STATE_NULL) self.gstreamer_init() self._last_index = -1 #trigger play() to reinit the pipe return False self._media_position, self._media_duration = new_pos, dur self.seek(new_pos) if self._media_duration <= 0: self._media_duration = 1 self._resize_pane() self._seek_scale.set_range(0,self._media_duration) self._seek_scale.set_value(self._media_position) if not RUNNING_HILDON: self._pipeline.set_property('volume',volume) else: self._pipeline.set_property('volume', 10) self._update_time_label() if RUNNING_HILDON: def pop_trap(): gtk.gdk.flush() gtk.gdk.error_trap_pop() return False gobject.idle_add(pop_trap) return True def _tick(self): self.__no_seek = True try: now = self._pipeline.query_position(gst.FORMAT_TIME)[0] if now - self._last_tick > self._tick_interval: self._last_tick = now self.emit('tick') except: pass self._update_seek_bar() self._update_time_label() self._update_position() if self._prepare_save: self._prepare_save = False self.save() self.__no_seek = False return self._pipeline.get_state()[1] == gst.STATE_PLAYING def _periodic_save_cb(self): self._prepare_save = True return True def _update_seek_bar(self): #for some reason when paused, hildon tells us the position is 0. Ignore it if RUNNING_HILDON and self._pipeline.get_state()[1] == gst.STATE_PAUSED: return try: self._media_position = self._pipeline.query_position(gst.FORMAT_TIME)[0] #print self._media_position, self._media_duration if self._media_position > self._media_duration: self._media_duration = self._pipeline.query_duration(gst.FORMAT_TIME)[0] self._seek_scale.set_range(0,self._media_duration) self._seek_scale.set_value(self._media_position) except Exception, e: print e def _update_time_label(self): def nano_to_string(long_val): seconds = long_val / gst.SECOND minutes = seconds / 60 seconds = seconds % 60 return "%i:%.2i" % (minutes,seconds) self._time_label.set_text(nano_to_string(self._media_position)+" / "+nano_to_string(self._media_duration)) def _update_position(self, pos=None): if pos is None: pos = self._media_position model = self._queue_listview.get_model() model[self._current_index][3] = pos #def _on_sync_message(self, bus, message): # if message.structure is None: # return # if message.structure.get_name() == 'prepare-xwindow-id': # logging.debug("forcing aspect to true") # message.src.set_property('force-aspect-ratio', True) ###drag and drop### def _on_queue_drag_data_received(self, treeview, context, x, y, selection, targetType, thyme): if targetType == self._TARGET_TYPE_REORDER: treeview.emit_stop_by_name('drag-data-received') model, paths_to_copy = treeview.get_selection().get_selected_rows() if len(paths_to_copy) > 1: print "can only move one at a time" return row = list(model[paths_to_copy[0][0]]) iter_to_copy = model.get_iter(paths_to_copy[0]) try: path, pos = treeview.get_dest_row_at_pos(x, y) target_iter = model.get_iter(path) playing_uri = model[self._current_index][0] if self.checkSanity(model, iter_to_copy, target_iter): self.iterCopy(model, target_iter, row, pos) context.finish(True, True, thyme) #finishes the move i=-1 for row in model: i+=1 if playing_uri == row[0]: self._last_index = 
self._current_index = i row[2]="•" #bullet else: row[2]="" else: context.finish(False, False, thyme) except: model.append(row) context.finish(True, True, thyme) elif targetType == self._TARGET_TYPE_URI_LIST: uri_list = [s for s in selection.data.split('\r\n') if len(s) > 0] for uri in uri_list: uri = uri.replace("file://", "") uri = urllib.unquote(uri) self.queue_file(uri) def checkSanity(self, model, iter_to_copy, target_iter): path_of_iter_to_copy = model.get_path(iter_to_copy) path_of_target_iter = model.get_path(target_iter) if path_of_target_iter[0:len(path_of_iter_to_copy)] == path_of_iter_to_copy: return False else: return True def iterCopy(self, target_model, target_iter, row, pos): if (pos == gtk.TREE_VIEW_DROP_INTO_OR_BEFORE) or (pos == gtk.TREE_VIEW_DROP_INTO_OR_AFTER): new_iter = target_model.append(row) elif pos == gtk.TREE_VIEW_DROP_BEFORE: new_iter = target_model.insert_before(target_iter, row) elif pos == gtk.TREE_VIEW_DROP_AFTER: new_iter = target_model.insert_after(target_iter, row) class GStreamerErrorDialog(gtk.Window): def __init__(self, type=gtk.WINDOW_TOPLEVEL): gtk.Window.__init__(self, type) self._last_message = "" self._label = gtk.Label() #gtk preparation vbox = gtk.VBox() vbox.pack_start(self._label, True, True, 0) hbox = gtk.HBox() l = gtk.Label("") hbox.pack_start(l, True) button = gtk.Button(stock='gtk-ok') button.connect('clicked', self._on_ok_clicked) hbox.pack_start(button, False) vbox.pack_start(hbox, False) self.add(vbox) self.connect('delete-event', self._on_delete_event) def show_error(self, error_msg): if error_msg == self._last_message: return self._last_message = error_msg self._label.set_text(error_msg) self.show_all() def _on_ok_clicked(self, button): self.hide() def _on_delete_event(self, widget, event): return self.hide_on_delete() #########app def do_quit(self, widget, player): player.finish() gtk.main_quit() def items_removed(player): print player.get_queue_count() def item_not_supported(app, player, filename, name): print filename,name, "not supported" fullscreen = False def on_app_key_press_event(widget, event, player, window): global fullscreen keyname = gtk.gdk.keyval_name(event.keyval) if keyname == 'f' or (RUNNING_HILDON and keyname == 'F6'): #maemo throws X Window System errors when doing this -- ignore them #http://labs.morpheuz.eng.br/blog/14/08/2007/xv-and-mplayer-on-maemo/ if RUNNING_HILDON: gtk.gdk.error_trap_push() fullscreen = not fullscreen player.toggle_controls(fullscreen) if fullscreen: window.window.fullscreen() else: window.window.unfullscreen() if RUNNING_HILDON: def pop_trap(): gtk.gdk.flush() gtk.gdk.error_trap_pop() return False gobject.idle_add(pop_trap) if __name__ == '__main__': # Here starts the dynamic part of the program window = gtk.Window() if RUNNING_SUGAR: import sugar.env home = os.path.join(sugar.env.get_profile_path(), 'penguintv') else: home = os.path.join(os.getenv('HOME'), ".penguintv") try: opts, args = getopt.getopt(sys.argv[1:], "p:", ["playlist="]) except getopt.GetoptError, e: print "error %s" % str(e) sys.exit(1) playlist = os.path.join(home, 'gst_playlist.pickle') if len(opts) > 0: print opts for o, a in opts: if o in ('-p', '--playlist'): playlist = a app = GStreamerPlayer(window, playlist) app.Show() window.connect('delete-event', do_quit, app) window.connect('key-press-event', on_app_key_press_event, app, window) window.resize(640,480) app.connect('items-removed', items_removed) app.connect('item-not-supported', item_not_supported) app.load() for item in args: app.queue_file(item) gtk.main() 
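# --- Illustrative sketch (hypothetical code, not from the PenguinTV source) ---
# A minimal, standalone version of the play/pause/seek workaround that
# _seek_in_ready() implements above, for sources that cannot seek while in
# the READY state.  It assumes the same pygst 0.10 bindings this player
# targets; demo_seek() and its uri/nanoseconds parameters are hypothetical
# names used only for illustration.
import gst

def demo_seek(uri, nanoseconds):
    pipeline = gst.element_factory_make("playbin", "demo")
    pipeline.set_property("uri", uri)
    # briefly enter PLAYING and block (up to 10s) until the change completes
    pipeline.set_state(gst.STATE_PLAYING)
    change, state, pending = pipeline.get_state(gst.SECOND * 10)
    if change != gst.STATE_CHANGE_SUCCESS:
        pipeline.set_state(gst.STATE_NULL)
        return False
    # now PAUSE, wait again, and only then issue the flushing seek
    pipeline.set_state(gst.STATE_PAUSED)
    pipeline.get_state(gst.SECOND * 10)
    pipeline.seek_simple(gst.FORMAT_TIME, gst.SEEK_FLAG_FLUSH, nanoseconds)
    return True
# --- end sketch ---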
PenguinTV-4.2.0/penguintv/OPML.py0000644000000000000000000001263011017542553013427 0ustar # Released under the GNU Lesser General Public License, v2.1 or later # Copyright (c) 2002 Juri Pakaste # Changes and fixes copyright (c) 2008 Owen Williams # $Id: OPML.py,v 1.2 2006/04/04 21:46:10 ywwg Exp $ from xml.sax import saxutils, make_parser, SAXParseException from xml.sax.handler import feature_namespaces, feature_namespace_prefixes from xml.sax.saxutils import XMLGenerator from xml.sax.xmlreader import AttributesImpl from StringIO import StringIO import xml.sax._exceptions import sys import logging class OPML(dict): def __init__(self): self.outlines = [] def output(self, stream = sys.stdout): xg = XMLGenerator(stream) def elemWithContent(name, content): xg.startElement(name, AttributesImpl({})) if content is not None: xg.characters(content) xg.endElement(name) xg.startElement("opml", AttributesImpl({'version': '1.1'})) xg.startElement("head", AttributesImpl({})) for key in ('title', 'dateCreated', 'dateModified', 'ownerName', 'ownerEmail', 'expansionState', 'vertScrollState', 'windowTop', 'windowBotton', 'windowRight', 'windowLeft'): if self.has_key(key) and self[key] != "": elemWithContent(key, self[key]) xg.endElement("head") xg.startElement("body", AttributesImpl({})) for o in self.outlines: o.output(xg) xg.endElement("body") xg.endElement("opml") class Outline(dict): __slots__ = ('_children') def __init__(self): self._children = [] def add_child(self, outline): self._children.append(outline) def get_children_iter(self): return self.OIterator(self) children = property(get_children_iter, None, None, "") def output(self, xg): xg.startElement("outline", AttributesImpl(self)) for c in self.children: c.output(xg) xg.endElement("outline") class OIterator: def __init__(self, o): self._o = o self._index = -1 def __iter__(self): return self def next(self): self._index += 1 if self._index < len(self._o._children): return self._o._children[self._index] else: raise StopIteration class OutlineList: def __init__(self): self._roots = [] self._stack = [] def add_outline(self, outline): if len(self._stack): #if there is already something on the stack outline.setdefault('categories','') #set up some categories for o in self._stack: outline['categories'] += o['title'] + "," outline['categories'] = outline['categories'][0:-1] #and, uh, do this thing self._stack[-1].add_child(outline) else: #otherwise it's a root. I guess. 
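# NOTE: self._stack mirrors the chain of currently-open <outline> elements,
# so while parents are open their titles are joined, comma-separated, into
# each child's 'categories' value; an empty stack means this outline is a
# top-level root.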
outline.setdefault('title',"Untitled") outline.setdefault('text',outline['title']) self._roots.append(outline) self._stack.append(outline) def close_outline(self): if len(self._stack): #remove the child at the end of the stack (the one that just closed) del self._stack[-1] def roots(self): return self._roots try: from xml.sax.handler import ContentHandler def_handler = ContentHandler except: try: from xml.sax.saxutils import DefaultHandler def_handler = DefaultHandler except Exception, e: logging.error("couldn't get xml parsing") raise e class OPMLHandler(def_handler): def __init__(self): self._outlines = OutlineList() self._opml = None self._content = "" def startElement(self, name, attrs): if self._opml is None: if name != 'opml': raise ValueError, "This doesn't look like OPML" self._opml = OPML() if name == 'outline': o = Outline() o.update(attrs) self._outlines.add_outline(o) self._content = "" def endElement(self, name): if name == 'outline': self._outlines.close_outline() return if name == 'opml': self._opml.outlines = self._outlines.roots() return for key in ('title', 'dateCreated', 'dateModified', 'ownerName', 'ownerEmail', 'expansionState', 'vertScrollState', 'windowTop', 'windowBotton', 'windowRight', 'windowLeft'): if name == key: self._opml[key] = self._content return def characters(self, ch): self._content += ch def get_opml(self): return self._opml def parse(stream): parser = make_parser() parser.setFeature(feature_namespaces, 0) handler = OPMLHandler() parser.setContentHandler(handler) parser.parse(stream) return handler.get_opml() def outline_generator(outline): if type(outline) is list: for o in outline: if o.has_key('xmlUrl'): yield o for i in o.get_children_iter(): for item in outline_generator(i): yield item elif type(outline) is Outline: if outline.has_key('xmlUrl'): yield outline for i in outline.get_children_iter(): for item in outline_generator(i): yield item PenguinTV-4.2.0/penguintv/BTDownloader.py0000644000000000000000000001152411024340656015203 0ustar # Written by Owen Williams # see LICENSE for license information from Downloader import * from ptvbittorrent import download from threading import Event import time import os, os.path import ptvDB import utils import socket #Downloader API: #constructor takes: media, params, resume, queue, progress_callback, finished_callback # media: the media dic # params: optional params, like for btdownloader # resume: are we supposed to resume? # queue: are we supposed queue for playback when download is finished? this variable is just passed around # progress_callback: function to call for progress update. # arg of this is: (media, progress as 0 < x < 1, and text formatted message of progress) # the callback will return 1 if we should cancel download # finished_callback: function to call when finished. # args is: (media, status, message) # where status is 0=failed, 1=success, 2=success and queue class BTDownloader(Downloader): def __init__(self, media, media_dir, params="", resume=False, queue=1, progress_callback=None, finished_callback=None): #bit torrent always resumes if it can. 
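# NOTE: i.e. the 'resume' flag is passed straight through; the
# ptvbittorrent backend picks up any partial data on disk by itself.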
Downloader.__init__(self, media, media_dir, params, resume, queue, progress_callback, finished_callback) self._bt_params = params self._done = Event() self._start_time=time.time() self._last_update=self._start_time self._downTotal=0 self._done_downloading = False def download(self,args_unused): if not Downloader.download(self,args_unused): #stopped before we began return params = ['--url' ,self.media['url']]+self._bt_params try: download.download(params, self._chooseFile, self._display, self._normalize_finished, self._error, self._done, 80, self._newpath) except socket.timeout, detail: self.media['errormsg'] = str(detail) self.status = FAILURE self.message = detail self._finished_callback() return except Exception, e: print e self.media['errormsg'] = _("There was an error downloading the torrent") self.status = FAILURE self.message = _("There was an error downloading the torrent") self._finished_callback() return if self.status not in [STOPPED,PAUSED]: self.status = STOPPED self.message = "" self._finished_callback() def _chooseFile(self, default, size, saveas, dir): self.total_size=size dated_dir = utils.get_dated_dir() change=0 if self.media['size']!=self.total_size: self.media['size']=self.total_size change = 1 if self.media['file']!=os.path.join(self._media_dir, dated_dir, str(default)): self.media['file']=os.path.join(self._media_dir, dated_dir, str(default)) change = 1 if change: db = ptvDB.ptvDB() db.set_media_filename(self.media['media_id'],self.media['file']) db.set_media_size(self.media['media_id'],self.media['size']) db.finish() del db return os.path.join(self._media_dir, dated_dir, str(default)) def _display(self, dict): if dict.has_key('downTotal'): self._downTotal = dict['downTotal'] if dict.has_key('fractionDone'): self.progress = int(dict['fractionDone']*100.0) d = {'progress':str(self.progress), 'size':utils.format_size(self.total_size) } if dict.has_key('timeEst'): d['time']=utils.hours(dict['timeEst']) self.message = _("Downloaded %(progress)s%% of %(size)s, %(time)s remaining.") % d else: self.message = _("Downloaded %(progress)s%% of %(size)s") % d if self._progress_callback() == 1: self._done.set() else: self.progress = 0 self.message = "" if self._progress_callback() == 1: self._done.set() if dict.has_key('upTotal'): #check to see if we should stop if self._done_downloading: if dict['upTotal'] >= self._downTotal: #if ratio is one, quit self._done.set() if time.time() - 60*60 >= self._start_time: #if it's been an hour, quit self._done.set() def _normalize_finished(self): if self._queue: self.status = FINISHED_AND_PLAY else: self.status = FINISHED d = {'filename':self.media['file']} self.message = _("Finished downloading %(filename)s") % d self._finished_callback() self._done_downloading = True #don't stop uploading, we keep going until 1:1 or one hour #FIXME: deal with directories just in case def _error(self, errormsg): #for some reason this isn't a fatal error if errormsg=='rejected by tracker - This tracker requires new tracker protocol. 
Please use our Easy Downloader or check blogtorrent.com for updates.': print "getting blogtorrent 'rejected by tracker' error, ignoring" else: self._done.set() raise TorrentError(errormsg) def _newpath(self, path): pass #print "new path?: " +path class NoDir(Exception): def __init__(self,durr): self.durr = durr def __str__(self): return "no such directory: "+self.durr class TorrentError(Exception): def __init__(self,m): self.m = m def __str__(self): return self.m PenguinTV-4.2.0/penguintv/AddSearchTagDialog.py0000644000000000000000000000352610646750251016261 0ustar # Written by Owen Williams # see LICENSE for license information import ptvDB import gtk import gettext import utils import LoginDialog _=gettext.gettext class AddSearchTagDialog: def __init__(self,xml,app): self._xml = xml self._app = app self._window = xml.get_widget("window_add_search_tag") for key in dir(self.__class__): if key[:3] == 'on_': self._xml.signal_connect(key, getattr(self,key)) self._query_widget = self._xml.get_widget("query_entry") self._tag_name_widget = self._xml.get_widget("tag_name_entry") def set_query(self, query): self._query_widget.set_text(query) def set_tag_name(self, tag_name): self._tag_name_widget.set_text(tag_name) def show(self): self._window.set_transient_for(self._app.main_window.get_parent()) self._tag_name_widget.grab_focus() self._window.show() #self.feed_url_widget.set_text("") self._tag_name_widget.set_text("") def on_window_add_feed_delete_event(self, widget, event): return self._window.hide_on_delete() def hide(self): self._tag_name_widget.set_text("") self._query_widget.set_text("") self._window.hide() def finish(self): try: self._app.add_search_tag(self._query_widget.get_text(), self._tag_name_widget.get_text()) except ptvDB.TagAlreadyExists, e: dialog = gtk.Dialog(title=_("Tag Name Already Exists"), parent=None, flags=gtk.DIALOG_MODAL, buttons=(gtk.STOCK_OK, gtk.RESPONSE_ACCEPT)) label = gtk.Label(_("That tag name is already in use. 
Please choose a different name.")) dialog.vbox.pack_start(label, True, True, 0) label.show() dialog.run() dialog.hide() del dialog self._tag_name_widget.grab_focus() return self.hide() def on_button_ok_clicked(self,event): self.finish() def on_tag_name_entry_activate(self, event): self.finish() def on_button_cancel_clicked(self,event): self.hide() PenguinTV-4.2.0/penguintv/SynchronizeDialog.py0000644000000000000000000002467511227076107016327 0ustar #(c) 2006 Owen Williams #see LICENSE import ptv_sync import ptvDB import gtk.glade import gobject import utils import logging import os, os.path if utils.HAS_LUCENE: import PyLucene threadclass = PyLucene.PythonThread else: import threading threadclass = threading.Thread class SynchronizeDialog: def __init__(self, gladefile, app): self._app = app self._xml = gtk.glade.XML(gladefile, 'synchronize_window','penguintv') self._dialog = self._xml.get_widget("synchronize_window") self._preview_dialog = self._xml.get_widget("sync_preview_window") for key in dir(self.__class__): if key[:3] == 'on_': self._xml.signal_connect(key, getattr(self,key)) self._audio_check = self._xml.get_widget("audio_check") self._delete_check = self._xml.get_widget("delete_check") self._move_check = self._xml.get_widget("move_check") self._destination_entry = self._xml.get_widget("dest_entry") self._delete_check.set_active(self._app.db.get_setting(ptvDB.BOOL, '/apps/penguintv/sync_delete', False)) self._move_check.set_active(self._app.db.get_setting(ptvDB.BOOL, '/apps/penguintv/sync_move', False)) self._audio_check.set_active(self._app.db.get_setting(ptvDB.BOOL, '/apps/penguintv/sync_audio_only', False)) self._dest_dir = self._app.db.get_setting(ptvDB.STRING, '/apps/penguintv/sync_dest_dir', "") self._destination_entry.set_text(self._dest_dir) self._cancel = False if utils.HAS_GCONF: try: import gconf except: from gnome import gconf self._conf = gconf.client_get_default() self._conf.add_dir('/apps/penguintv',gconf.CLIENT_PRELOAD_NONE) self._conf.notify_add('/apps/penguintv/sync_delete',self.set_sync_delete) self._conf.notify_add('/apps/penguintv/sync_move',self.set_sync_move) self._conf.notify_add('/apps/penguintv/sync_audio_only',self.set_audio_only) self._conf.notify_add('/apps/penguintv/sync_dest_dir',self.set_dest_dir) self._progress_dialog = SynchronizeDialog.SyncProgress(gtk.glade.XML(gladefile, 'sync_progress_window','penguintv'), self._cancel_cb) self._preview_dialog = SynchronizeDialog.SyncPreview(gtk.glade.XML(gladefile, 'sync_preview_window','penguintv'), self._cancel_cb, self._sync_cb) def Show(self): self._dialog.show() def run(self): self._destination_entry.grab_focus() return self._dialog.run() def _check_dest_dir(self, dir_to_check): try: stat = os.stat(dir_to_check) if not os.path.isdir(dir_to_check): return False self._app.db.set_setting(ptvDB.STRING, '/apps/penguintv/sync_dest_dir',dir_to_check) logging.debug("sync destination OK: %s" % dir_to_check) return True except Exception, e: logging.warning("sync destination FAIL: %s \n %s" % (dir_to_check, str(e))) return False def _cancel_cb(self): self._cancel = True #hide preview just in case, but don't hide progress because we want to see that it's cancelling self._preview_dialog.hide() def _sync_cb(self): self._preview_dialog.hide() self.on_sync_button_clicked(None) def on_browse_button_clicked(self, event): dialog = gtk.FileChooserDialog(_('Select Destination Folder...'),None, action=gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER, buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN,gtk.RESPONSE_OK)) 
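# NOTE: FILE_CHOOSER_ACTION_SELECT_FOLDER restricts the picker to
# directories, matching the os.path.isdir() test in _check_dest_dir().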
dialog.set_default_response(gtk.RESPONSE_OK) response = dialog.run() if response == gtk.RESPONSE_OK: self._destination_entry.set_text(dialog.get_filename()) dialog.destroy() def on_delete_check_toggled(self, event): self._app.db.set_setting(ptvDB.BOOL, '/apps/penguintv/sync_delete',self._delete_check.get_active()) def on_move_check_toggled(self, event): self._app.db.set_setting(ptvDB.BOOL, '/apps/penguintv/sync_move',self._move_check.get_active()) def on_audio_check_toggled(self, event): self._app.db.set_setting(ptvDB.BOOL, '/apps/penguintv/sync_audio_only',self._audio_check.get_active()) def on_sync_button_clicked(self, event): self._dest_dir = self._destination_entry.get_text() if not self._check_dest_dir(self._dest_dir): dialog = gtk.Dialog(title=_("Destination Error"), parent=None, flags=gtk.DIALOG_MODAL, buttons=(gtk.STOCK_OK, gtk.RESPONSE_ACCEPT)) label = gtk.Label(_("The destination you have selected is not valid. \nPlease select another destination and try again.")) dialog.vbox.pack_start(label, True, True, 0) label.show() response = dialog.run() dialog.hide() del dialog return #sync = ptv_sync.ptv_sync(self._dest_dir, self._delete_check.get_active(), self._audio_check.get_active()) move_files = self._move_check.get_active() sync = SynchronizeDialog._sync_thread(self._dest_dir, self._delete_check.get_active(), move_files, self._audio_check.get_active()) self._progress_dialog.progress_bar.set_fraction(0) self._progress_dialog.progress_label.set_text("") self._progress_dialog.Show() self._dialog.hide() def _sync_gen(): while True: p = sync.progress t = sync.total m = sync.message if not self._cancel: if move_files: for m_id in sync.pop_delete_list(): self._app.delete_media(m_id) if t == -1: self._progress_dialog.progress_bar.pulse() else: self._progress_dialog.progress_bar.set_fraction(float(p)/float(t)) self._progress_dialog.progress_label.set_markup(""+m+"") else: sync.interrupt() #don't exit loop, just keep going if p == t: break yield True if self._cancel: self._progress_dialog.hide() self._cancel = False self._progress_dialog.hide() yield False gobject.timeout_add(100,_sync_gen().next) sync.start() class _sync_thread(threadclass): def __init__(self, dest_dir, delete=False, move=False, audio=False): threadclass.__init__(self) self._dest_dir = dest_dir self._delete = delete self._move = move self._audio = audio self._cancel = False self._delete_list = [] self.progress = 0 self.total = 100 self.message = "" def interrupt(self): self._cancel = True def pop_delete_list(self): retval = self._delete_list self._delete_list = [] return retval def run(self): self._cancel = False sync = ptv_sync.ptv_sync(self._dest_dir, delete=self._delete, move=self._move, audio=self._audio) try: for event in sync.sync_gen(): if not self._cancel: self.progress = event[0] self.total = event[1] self.message = event[2] # Append to list of media ids to delete from app if event[3] is not None: self._delete_list.append(event[3]) else: sync.interrupt() #don't exit loop except Exception, e: print "error copying file:",e self.progress = self.total #just make sure this is done def on_preview_button_clicked(self, event): self._dest_dir = self._destination_entry.get_text() if not self._check_dest_dir(self._dest_dir): dialog = gtk.Dialog(title=_("Destination Error"), parent=None, flags=gtk.DIALOG_MODAL, buttons=(gtk.STOCK_OK, gtk.RESPONSE_ACCEPT)) label = gtk.Label(_("The destination you have selected is not valid. 
Please select another destination and try again.")) dialog.vbox.pack_start(label, True, True, 0) label.show() response = dialog.run() dialog.hide() del dialog return sync = ptv_sync.ptv_sync(self._dest_dir, delete=self._delete_check.get_active(), move=self._move_check.get_active(), audio=self._audio_check.get_active(), dryrun=True) self._preview_dialog.buff.set_text("") self._preview_dialog.Show() self._dialog.hide() def _sync_gen(): text = "" last_message = "" for event in sync.sync_gen(): if not self._cancel: #text+="\n"+event[2] #self._preview_dialog.buff.set_text(text) if last_message != event[2]: self._preview_dialog.buff.insert_at_cursor(event[2]+"\n") last_message = event[2] else: sync.interrupt() #else:... just yield, let the generator run its course yield True if self._cancel: self._preview_dialog.hide() self._cancel = False yield False gobject.idle_add(_sync_gen().next) def on_cancel_button_clicked(self, event): self.hide() def on_synchronize_window_delete_event(self, widget, event): return self._dialog.hide_on_delete() def hide(self): self._dialog.hide() def set_sync_delete(self, client, *args, **kwargs): self._delete_check.set_active(self._app.db.get_setting(ptvDB.BOOL, '/apps/penguintv/sync_delete', False)) def set_sync_move(self, client, *args, **kwargs): self._move_check.set_active(self._app.db.get_setting(ptvDB.BOOL, '/apps/penguintv/sync_move', False)) def set_audio_only(self, client, *args, **kwargs): self._audio_check.set_active(self._app.db.get_setting(ptvDB.BOOL, '/apps/penguintv/sync_audio_only', False)) def set_dest_dir(self, client, *args, **kwargs): self._dest_dir = self._app.db.get_setting(ptvDB.STRING, '/apps/penguintv/sync_dest_dir', "") self._destination_entry.set_text(self._dest_dir) class SyncProgress: def __init__(self, xml, cancel_cb): self._dialog = xml.get_widget('sync_progress_window') self.progress_bar = xml.get_widget('sync_progressbar') self.progress_bar.set_pulse_step(.05) self.progress_label = xml.get_widget('progress_info_label') self._cancel_cb = cancel_cb for key in dir(self.__class__): if key[:3] == 'on_': xml.signal_connect(key, getattr(self,key)) def Show(self): self._dialog.show_all() def hide(self): self._dialog.hide() def on_sync_progress_window_delete_event(self, widget, event): return self._dialog.hide_on_delete() def on_cancel_button_clicked(self, event): self._cancel_cb() self.progress_label.set_markup(_("Cancelling...")) #self.hide() #stay up while the cancel completes class SyncPreview: def __init__(self, xml, cancel_cb, sync_cb): self._dialog = xml.get_widget('sync_preview_window') self._cancel_cb = cancel_cb self._sync_cb = sync_cb self.buff = gtk.TextBuffer() self._sync_textview = xml.get_widget('sync_textview') self._sync_textview.set_buffer(self.buff) for key in dir(self.__class__): if key[:3] == 'on_': xml.signal_connect(key, getattr(self,key)) def Show(self): self._dialog.show_all() def hide(self): self._dialog.hide() def on_sync_preview_window_delete_event(self, widget, event): return self._dialog.hide_on_delete() def on_sync_button_clicked(self, event): self._sync_cb() self.hide() def on_cancel_button_clicked(self, event): self._cancel_cb() self.hide() PenguinTV-4.2.0/penguintv/HTTPDownloader.py0000644000000000000000000001347011311773300015452 0ustar import logging import time from Downloader import * import ptvDB import pycurl import utils import os #import time import MediaManager #Downloader API: #constructor takes: media, params, resume, queue, progress_callback, finished_callback # media: the media dic # params: optional 
params, like for btdownloader # resume: are we supposed to resume? # queue: are we supposed queue for playback when download is finished? this variable is just passed around # progress_callback: function to call for progress update. # arg of this is: (media, progress as 0 < x < 1, and text formatted message of progress) # the callback will return 1 if we should cancel download # finished_callback: function to call when finished. # args is: (media, status, message) # where status is 0=failed, 1=success, 2=success and queue class HTTPDownloader(Downloader): """Need a little internal class to keep track of callbacks from urllib.urlretrieve""" def __init__(self, media, media_dir, params, resume, queue, progress_callback, finished_callback): Downloader.__init__(self, media, media_dir, params, resume, queue, progress_callback, finished_callback) #no params self._resume_from = 0 self._last_progress = 0 def download(self,args_unused): if not Downloader.download(self,args_unused): #stopped before we began, no callback return try: os.makedirs(os.path.dirname(self.media['file'])) except OSError: pass try: if self._resume: try: fp = open(self.media['file'], "ab") except: fp = open(self.media['file'], "wb") else: fp = open(self.media['file'], "wb") curl = pycurl.Curl() curl.setopt(pycurl.URL, str(self.media['url']).strip()) #cause it's unicode or some shit which is not a string or some junk. Also strip whitespace curl.setopt(pycurl.FOLLOWLOCATION, 1) curl.setopt(pycurl.MAXREDIRS, 5) curl.setopt(pycurl.CONNECTTIMEOUT, 30) curl.setopt(pycurl.NOSIGNAL, 1) #makes downloading thread-safe curl.setopt(pycurl.WRITEDATA, fp) curl.setopt(pycurl.PROGRESSFUNCTION, self._wrap_progress_callback) curl.setopt(pycurl.NOPROGRESS, 0) curl.setopt(pycurl.USERAGENT,'PenguinTV '+utils.VERSION) if self._resume: self._resume_from = os.stat(self.media['file'])[6] curl.setopt(pycurl.RESUME_FROM_LARGE, self._resume_from) curl.perform() response = curl.getinfo(pycurl.RESPONSE_CODE) curl.close() fp.close() if self.media['url'][:5] == "http:": if response != 200 and response != 206: logging.warning("HTTP download error: %s %s" % (self.media['url'],response)) if response == 404: self.media['errormsg']=_("404: File Not Found: %s" % self.media['url']) elif response == 400: #maybe not properly escaped url? 
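# NOTE: a 400 here often means the URL contains unescaped spaces or
# non-ASCII bytes, so each component is percent-quoted and the download
# retried, e.g. "http://ex.com/a b.mp3" -> "http://ex.com/a%20b.mp3".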
import urlparse, urllib p = urlparse.urlparse(self.media['url']) new_p = (p[0], urllib.quote(p[1]), urllib.quote(p[2]), urllib.quote(p[3]), urllib.quote(p[4]), urllib.quote(p[5])) new_url = urlparse.urlunparse(new_p) self.media['url'] = new_url self.download(None) return else: d = {"response":response, "url":self.media['url']} self.media['errormsg']=_("Some HTTP error: %(response)s %(url)s") % d self.status = FAILURE self.message = self.media['errormsg'] self._finished_callback() return elif self.media['url'][:5] == "file:": pass #it's ok, curl would throw an exception on error elif self.media['url'][:4] == "ftp:": major_code = response / 100 if major_code == 2: #positive reply pass elif major_code == 4 or major_code == 5: d = {"response":response, "url":self.media['url']} self.media['errormsg']=_("FTP error: %(response)s %(url)s") % d else: d = {"response":response, "url":self.media['url']} self.media['errormsg']=_("Unexpected FTP response: %(response)s %(url)s") % d else: self.media['errormsg']=_("Unknown protocol for url %s" % self.media['url']) self.status = FAILURE self.message = self.media['errormsg'] self._finished_callback() return if self._queue: self.status = FINISHED_AND_PLAY self.message = _("finished downloading %s") % self.media['file'] self._finished_callback() return self.status = FINISHED self.message = _("finished downloading %s") % self.media['file'] self._finished_callback() except Exception, data: #this can happen if we cancelled the download if data[0]==33: #if server doesn't support resuming, retry self._resume=False self.download(None) elif data[0]==42: if self.status not in [STOPPED, PAUSED]: self.status = STOPPED self.message = "" self._finished_callback() else: logging.warning("some downloading error %s %s" % (str(data),self.media)) self.media['errormsg']=data self.status = FAILURE self.message = data self._finished_callback() def _wrap_progress_callback(self, dl_total, dl_now, ul_total, ul_now): now = time.time() if now - self._last_progress < 2.0: return self._last_progress = now if self._resume: #adjust sizes so that the percentages are correct dl_total += self._resume_from dl_now += self._resume_from try: self.progress = int((dl_now*100.0)/dl_total) except: self.progress = 0 if not self.media.has_key('size'): self.media['size_adjustment']=True elif self.media['size']!=round(dl_total) and not self._resume: self.media['size']=round(dl_total) self.media['size_adjustment']=True else: self.media['size_adjustment']=False d = { 'progress': str(self.progress), 'size': utils.format_size(self.media['size'])} self.total_size = self.media['size'] if self.total_size == 0: d['dl_now'] = dl_now self.message = _("Downloaded %(dl_now)s...") % d else: self.message = _("Downloaded %(progress)s%% of %(size)s") % d return self._progress_callback() PenguinTV-4.2.0/penguintv/penguintv.py0000755000000000000000000032301711416654346014715 0ustar #!/usr/bin/env python # Written by Owen Williams # using pieces from Straw # see LICENSE for license information #states: DEFAULT = 1 MANUAL_SEARCH = 2 TAG_SEARCH = 3 MAJOR_DB_OPERATION = 4 DONE_MAJOR_DB_OPERATION = 5 #memory profiling: #import code #from sizer import scanner ##objs = scanner.Objects() ##code.interact(local = {'objs': objs}) ##from sizer import formatting #import urlparse loaded as needed try: from sqlite3 import OperationalError as OperationalError import sqlite3 as sqlite except: from pysqlite2.dbapi2 import OperationalError as OperationalError from pysqlite2 import dbapi2 as sqlite import threading import sys, os, os.path 
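# NOTE: the try/except above prefers the stdlib sqlite3 module (Python 2.5+)
# and falls back to the external pysqlite2 package on older interpreters.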
import logging #import traceback logging.basicConfig(level=logging.DEBUG) import urllib import time import string import subprocess import getopt #socket.setdefaulttimeout(30.0) try: set except: from sets import Set as set import pygtk pygtk.require('2.0') import gtk import gtk.glade import gobject import locale import gettext try: import dbus import dbus.service HAS_DBUS = True except: HAS_DBUS = False #locale.setlocale(locale.LC_ALL, '') gettext.install('penguintv', '/usr/share/locale') gettext.bindtextdomain('penguintv', '/usr/share/locale') gettext.textdomain('penguintv') _=gettext.gettext DOWNLOAD_ERROR=0 DOWNLOAD_PROGRESS=1 DOWNLOAD_WARNING=2 DOWNLOAD_QUEUED=3 import utils import ptvDB if HAS_DBUS: import ptvDbus import MediaManager import Player import UpdateTasksManager import Downloader import ArticleSync if HAS_DBUS: import Poller import PreferencesDialog import MainWindow, FeedList if utils.RUNNING_HILDON: HAS_GNOME = False else: try: import gnome import gnome.ui HAS_GNOME=True except: HAS_GNOME=False if utils.HAS_STATUS_ICON: import PtvTrayIcon if utils.RUNNING_HILDON: #FIXME: should do something import hildon import HildonListener import osso # HAS_DBUS = False if not utils.RUNNING_HILDON: import webbrowser import gc CANCEL=0 PAUSE=1 REFRESH_SPECIFIED=0 REFRESH_AUTO=1 REFRESH_NEVER=2 LOCAL=0 REMOTE=1 if utils.RUNNING_SUGAR or utils.RUNNING_HILDON: AUTO_REFRESH_FREQUENCY=30*60*1000 else: AUTO_REFRESH_FREQUENCY=5*60*1000 class PenguinTVApp(gobject.GObject): __gsignals__ = { 'feed-polled': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_INT, gobject.TYPE_PYOBJECT])), 'feed-added': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_INT, gobject.TYPE_BOOLEAN])), 'feed-removed': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_INT])), 'feed-name-changed': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_INT, gobject.TYPE_STRING, gobject.TYPE_STRING])), 'entry-updated': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_INT, gobject.TYPE_INT])), 'entries-viewed': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_PYOBJECT])), 'entries-unviewed': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_PYOBJECT])), 'render-ops-updated': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([])), 'notify-tags-changed': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([])), # the integer here is really just so I can avoid a circular codepath # in tag editor ng 'tags-changed': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_INT])), 'download-finished': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_PYOBJECT])), 'app-loaded': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([])), 'setting-changed':(gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_INT, gobject.TYPE_STRING, gobject.TYPE_PYOBJECT])), 'state-changed':(gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, ([gobject.TYPE_INT, gobject.TYPE_PYOBJECT])), 'online-status-changed':(gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_BOOLEAN])), 'new-database': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_PYOBJECT])), } def __init__(self, window=None, playlist=None): gobject.GObject.__init__(self) self._for_import = [] self.__importing = False self._app_loaded = False self._remote_poller = None self._remote_poller_pid = 0 self._exiting=0 if HAS_DBUS: #if we can get a dbus object, and it's using #our database, penguintv is already running bus = dbus.SessionBus() dubus = bus.get_object('org.freedesktop.DBus', 
'/org/freedesktop/dbus') dubus_methods = dbus.Interface(dubus, 'org.freedesktop.DBus') if dubus_methods.NameHasOwner('com.ywwg.PenguinTV'): remote_object = bus.get_object("com.ywwg.PenguinTV", "/PtvApp") remote_app = dbus.Interface(remote_object, "com.ywwg.PenguinTV.AppInterface") if remote_app.GetDatabaseName() == os.path.join(utils.get_home(), "penguintv4.db"): #shouldn't happen if this file is run with __main__ raise AlreadyRunning, remote_app #initialize dbus object name = dbus.service.BusName("com.ywwg.PenguinTV", bus=bus) ptv_dbus = ptvDbus.ptvDbus(self, name) self._net_connected = True self.connect('online-status-changed', self.__online_status_changed) self.connect('state-changed', self._state_changed_cb) if utils.get_share_prefix() is None: logging.error("Error finding glade file. Have you run python setup.py build?") sys.exit() logging.info("penguintv " + utils.VERSION + " startup") self.db = ptvDB.ptvDB(self.polling_callback, self._emit_change_setting) #we have already run this down at the bottom, but run it again #because we don't init the DB down there (should we?) self._firstrun = self.db.maybe_initialize_db() # Clean media status on startup, not exit, in case of crash. self.db.clean_media_status() media_dir = self.db.get_setting(ptvDB.STRING, '/apps/penguintv/media_storage_location', os.path.join(utils.get_home(), "media")) media_dir = media_dir.replace("\"","") self.mediamanager = MediaManager.MediaManager(self, media_dir, self._progress_callback, self._finished_callback) self._polled=0 #Used for updating the polling progress bar self._poll_message = "" self._polling_taskinfo = -1 # the taskid we can use to waitfor a polling operation, # and the time of last polling self._polling_thread = REMOTE self._poll_new_entries = [] self.polling_frequency=12*60*60*1000 self._bt_settings = {} self._auto_download = False self._auto_download_limiter = False self._auto_download_limit=50*1024 self._saved_filter = FeedList.ALL self._saved_search = "" self._showing_search = False self._threaded_searcher = None self._waiting_for_search = False self._state = DEFAULT self._update_thread = None window_layout = self.db.get_setting(ptvDB.STRING, '/apps/penguintv/app_window_layout', 'planet') if utils.RUNNING_SUGAR: window_layout='planet' #always use planet on sugar platform if utils.RUNNING_HILDON: window_layout='hildon_planet' #stupid gconf will default to false if the key doesn't exist. And of course the schema usually #doesn't work until they re-login... 
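# NOTE: hence the explicit defaults passed to get_setting() below rather
# than relying on the gconf schema being installed.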
if not utils.RUNNING_SUGAR: use_internal_player = self.db.get_setting(ptvDB.BOOL, '/apps/penguintv/use_internal_player', True) else: use_internal_player = True self._status_icon = None if utils.HAS_STATUS_ICON: if utils.RUNNING_HILDON: icon = "/usr/share/icons/hicolor/26x26/hildon/penguintvicon.png" if not os.path.isfile(icon): icon = utils.get_image_path('penguintvicon.png') else: icon = utils.get_image_path('penguintvicon.png') self._status_icon = PtvTrayIcon.PtvTrayIcon(self, icon) self.main_window = MainWindow.MainWindow(self, use_internal_player, window=window, status_icon=self._status_icon, playlist=playlist) self.main_window.layout=window_layout self._hildon_context = None if utils.RUNNING_HILDON: self._hildon_context = osso.Context("PenguinTV", utils.VERSION, False) #some signals self.connect('feed-added', self.__feed_added_cb) def _handle_db_exception(self): logging.debug("Got db exception, reconnecting to database") self.db._db.close() del self.db self.db = ptvDB.ptvDB(self.polling_callback, self._emit_change_setting) self.emit('new-database', self.db) @utils.db_except() def post_show_init(self): """After we have Show()n the main window, set up some more stuff""" gst_player = self.main_window.get_gst_player() self.player = Player.Player(self, gst_player) if gst_player is not None: gst_player.connect('item-not-supported', self._on_item_not_supported) gst_player.connect('tick', self._on_gst_tick) self._gui_updater = UpdateTasksManager.UpdateTasksManager(UpdateTasksManager.GOBJECT, "gui updater") if utils.RUNNING_HILDON: hildon_listener = HildonListener.HildonListener(self, self.main_window.window, self._hildon_context) #WINDOWS #TODO: move this initialization till when we actually need these if utils.RUNNING_HILDON: self.window_preferences = PreferencesDialog.PreferencesDialog(gtk.glade.XML(os.path.join(utils.get_glade_prefix(),'hildon_dialogs.glade'), "window_preferences",'penguintv'),self) #MAGIC else: self.window_preferences = PreferencesDialog.PreferencesDialog(gtk.glade.XML(os.path.join(utils.get_glade_prefix(),'dialogs.glade'), "window_preferences",'penguintv'),self) #MAGIC self.window_preferences.hide() if utils.HAS_STATUS_ICON: self._status_icon.set_parent(self.main_window.window) #gconf if utils.HAS_GCONF: try: import gconf except: from gnome import gconf conf = gconf.client_get_default() conf.add_dir('/apps/penguintv',gconf.CLIENT_PRELOAD_NONE) conf.notify_add('/apps/penguintv/auto_resume',self._gconf_set_auto_resume) conf.notify_add('/apps/penguintv/poll_on_startup',self._gconf_set_poll_on_startup) conf.notify_add('/apps/penguintv/cache_images_locally',self._gconf_set_cache_images) conf.notify_add('/apps/penguintv/bt_max_port',self._gconf_set_bt_maxport) conf.notify_add('/apps/penguintv/bt_min_port',self._gconf_set_bt_minport) conf.notify_add('/apps/penguintv/bt_ul_limit',self._gconf_set_bt_ullimit) conf.notify_add('/apps/penguintv/feed_refresh_frequency',self._gconf_set_polling_frequency) conf.notify_add('/apps/penguintv/app_window_layout',self._gconf_set_app_window_layout) conf.notify_add('/apps/penguintv/feed_refresh_method',self._gconf_set_feed_refresh_method) conf.notify_add('/apps/penguintv/show_notifications',self._gconf_set_show_notifications) conf.notify_add('/apps/penguintv/auto_download',self._gconf_set_auto_download) conf.notify_add('/apps/penguintv/show_notification_always',self._gconf_set_show_notification_always) conf.notify_add('/apps/penguintv/auto_download_limiter',self._gconf_set_auto_download_limiter) 
conf.notify_add('/apps/penguintv/auto_download_limit',self._gconf_set_auto_download_limit) conf.notify_add('/apps/penguintv/media_storage_location',self._gconf_set_media_storage_location) conf.notify_add('/apps/penguintv/media_storage_style',self._gconf_set_media_storage_style) conf.notify_add('/apps/penguintv/use_article_sync',self._gconf_set_use_article_sync) #conf.notify_add('/apps/penguintv/sync_username',self._gconf_set_sync_username) #conf.notify_add('/apps/penguintv/sync_password',self._gconf_set_sync_password) conf.notify_add('/apps/penguintv/sync_readonly', self._gconf_set_sync_readonly) self._load_settings() #more DBUS if HAS_DBUS: sys_bus = dbus.SystemBus() try: sys_bus.add_signal_receiver(self._nm_device_no_longer_active, 'DeviceNoLongerActive', 'org.freedesktop.NetworkManager', 'org.freedesktop.NetworkManager', '/org/freedesktop/NetworkManager') sys_bus.add_signal_receiver(self._nm_device_now_active, 'DeviceNowActive', 'org.freedesktop.NetworkManager', 'org.freedesktop.NetworkManager', '/org/freedesktop/NetworkManager') nm_ob = sys_bus.get_object("org.freedesktop.NetworkManager", "/org/freedesktop/NetworkManager") self._nm_interface = dbus.Interface(nm_ob, "org.freedesktop.NetworkManager") logging.info("Listening to NetworkManager") except: logging.warning("Couldn't connect to NetworkManager") self._nm_interface = None self.feed_list_view = self.main_window.feed_list_view self._entry_list_view = self.main_window.entry_list_view self._entry_view = self.main_window.entry_view self._entry_view.post_show_init() self._article_sync = self._setup_article_sync() self.window_preferences.set_article_sync(self._article_sync) self.window_preferences.set_article_sync_plugin( \ self._article_sync.get_current_plugin()) self._connect_signals() self.main_window.search_container.set_sensitive(False) if utils.HAS_SEARCH: #if self.db.cache_dirty or self.db.searcher.needs_index: #assume index is bad as well or if it is bad if self.db.searcher.needs_index: #if index is bad self.main_window.search_entry.set_text(_("Please wait...")) self.main_window.display_status_message(_("Reindexing Feeds...")) self.db.doindex(self._sensitize_search) self._populate_feeds(self._done_populating_dont_sensitize) else: self._populate_feeds(self._done_populating) else: ##PROFILE: comment out self._populate_feeds(self._done_populating) #val = self.db.get_setting(ptvDB.INT, '/apps/penguintv/selected_feed', 0) #if val > 0: # self.feed_list_view.set_selected(val) val = self.db.get_setting(ptvDB.INT, '/apps/penguintv/selected_entry', 0) if val > 0: #self._entry_list_view.set_selected(val) try: self.select_entry(val) except: pass #crash protection: if we crash, we'll have resetted selected_feed to 0 self.db.set_setting(ptvDB.INT, '/apps/penguintv/selected_feed', 0) self.db.set_setting(ptvDB.INT, '/apps/penguintv/selected_entry', 0) if self._autoresume: gobject.idle_add(self.resume_resumable) self.update_disk_usage() # do this after we have the poller if self._firstrun and not utils.RUNNING_HILDON: self._import_default_feeds() #gtk.gdk.threads_leave() self.emit('app-loaded') self._app_loaded = True self.db.done_initializing() if utils.RUNNING_HILDON: self.main_window.feed_tabs.set_current_page(0) #self.save_settings() return False #for idler def poller_ping_cb(self): if self._exiting: return False # -2 pid means we are ready to grab # -1 pid indicates we are already trying to grab # 0 pid indicates no poller, dead poller, or no other status. 
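# NOTE: _remote_poller_pid sentinel summary:
#   -2 : poller just spawned; grab the dbus proxy on its next ping
#   -1 : a grab attempt is already in flight on another thread
#    0 : no poller (or a dead one); _spawn_poller() should run
#   >0 : live poller; the value is its OS pid, used by the kill/ping checks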
if self._remote_poller is None and self._remote_poller_pid == -2: logging.debug("poller ping, getting it") p = threading.Thread(None, self._get_poller) p.start() return True def _spawn_poller(self): self._remote_poller_pid = -2 rundir = os.path.split(utils.__file__)[0] if rundir == "": rundir = "./" #logging.debug("RUN POLLER NOW") logging.debug("running poller: %s %s" % ('/usr/bin/env python', os.path.join(rundir, 'Poller.py'))) subprocess.Popen(['/usr/bin/env', 'python', os.path.join(rundir, 'Poller.py')]) def _get_poller(self): self._remote_poller = None self._remote_poller_pid = -1 gtk.gdk.threads_enter() bus = dbus.SessionBus() dubus = bus.get_object('org.freedesktop.DBus', '/org/freedesktop/dbus') dubus_methods = dbus.Interface(dubus, 'org.freedesktop.DBus') gtk.gdk.threads_leave() wait_time = 5 sleep_time = 0.3 if utils.RUNNING_HILDON: wait_time = 5 sleep_time = 2 for i in range(0, wait_time): if self._exiting: break #sleep first to give Poller time to return its dbus call (which is #what triggered this function #don't want to deadlock mutual dbus calls time.sleep(sleep_time) gtk.gdk.threads_enter() logging.debug("Getting poller now") if dubus_methods.NameHasOwner('com.ywwg.PenguinTVPoller'): o = bus.get_object("com.ywwg.PenguinTVPoller", "/PtvPoller") poller = dbus.Interface(o, "com.ywwg.PenguinTVPoller.PollInterface") try: if poller.is_quitting(): break else: self._remote_poller = poller self._remote_poller_pid = self._remote_poller.get_pid() except: gtk.gdk.threads_leave() break gtk.gdk.threads_leave() break gtk.gdk.threads_leave() if self._remote_poller is None: logging.error("Unable to start remote poller. Polling will be done in-process") self._remote_poller_pid = 0 else: logging.debug("Got poller") if utils.RUNNING_HILDON: # Now that we have the poller, do firstrun import gtk.gdk.threads_enter() if self._firstrun: self._import_default_feeds() gtk.gdk.threads_leave() #return success or fail return self._remote_poller_pid != 0 def _check_poller(self): #logging.debug("Checking in on the poller: %i" % self._remote_poller_pid) if self._remote_poller_pid < 0: logging.debug("Not checking, no poller anyway (maybe it hasn't started up)") elif self._remote_poller_pid == 0: logging.debug("No poller, spawning it") self._spawn_poller() else: try: #is the process still running? os.kill(self._remote_poller_pid, 0) except Exception, e: logging.error("We lost the poller: %s" % str(e)) if self._polling_taskinfo != -1: self._polled = 0 self._polling_taskinfo = -1 self._poll_message = "" if not utils.RUNNING_HILDON: self._article_sync.get_readstates_for_entries(self._poll_new_entries) self._poll_new_entries = [] self.main_window.update_progress_bar(-1, MainWindow.U_POLL) self.main_window.display_status_message(_("Polling Error"),MainWindow.U_POLL) gobject.timeout_add(2000, self.main_window.display_status_message,"") self._remote_poller = None self._remote_poller_pid = 0 self._spawn_poller() return True if self._polling_taskinfo == -1: #logging.debug("pinging the poller") try: #and is it still responding? 
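# NOTE: ping() is a synchronous dbus round-trip; an exception here means
# the process exists (os.kill(pid, 0) succeeded above) but is not
# responding, so the except branch SIGTERMs it and respawns a fresh poller.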
self._remote_poller.ping() #logging.debug("Poller still running") except Exception, e: logging.debug("no ping response, rerunning poller: %s" % str(e)) os.kill(self._remote_poller_pid, 15) self._spawn_poller() if self._get_poller(): logging.debug("restarted poller") else: logging.debug("failed to restart poller") #else: # logging.debug("not pinging while polling") return True def _setup_article_sync(self): if utils.ENABLE_ARTICLESYNC: enabled = self.db.get_setting(ptvDB.BOOL, '/apps/penguintv/use_article_sync', False) else: enabled = False plugin = self.db.get_setting(ptvDB.STRING, '/apps/penguintv/article_sync_plugin', "") readonly = self.db.get_setting(ptvDB.BOOL, '/apps/penguintv/sync_readonly', False) article_sync = ArticleSync.ArticleSync(self, self._entry_view, plugin, enabled, readonly) self.window_preferences.set_use_article_sync(enabled) self.window_preferences.set_article_sync_readonly(readonly) return article_sync def sync_authenticate(self, newplugin=None, cb=None): logging.debug("authenticating sync settings") def authenticate_cb(result): if result: self._sync_articles_put() #self._sync_articles_get() self.window_preferences.set_sync_status(_("Logged in")) else: self.window_preferences.set_sync_status(_("Not Logged in")) if cb is not None: cb(result) return False def _do_authenticate(): if not self._article_sync.is_loaded(): logging.debug("plugin not loaded yet") return True self._article_sync.authenticate(cb=authenticate_cb) return False #username = self.db.get_setting(ptvDB.STRING, '/apps/penguintv/sync_username', "") #self._article_sync.set_username(username) #password = self.db.get_setting(ptvDB.STRING, '/apps/penguintv/sync_password', "") #self._article_sync.set_password(password) if newplugin is not None: logging.debug("telling old plugin to clean up") self._article_sync.finish() logging.debug("old plugin done") else: if self._article_sync.get_current_plugin() is None: logging.debug("no current plugin specified to authenticate") return if not self._article_sync.is_loaded(): logging.debug("app loading the plugin") self._article_sync.load_plugin(newplugin) gobject.timeout_add(500, _do_authenticate) else: _do_authenticate() def _sync_articles_put(self): timestamp = self.db.get_setting(ptvDB.INT, 'article_sync_timestamp', int(time.time())) self._article_sync.submit_readstates_since(timestamp, self.__put_readstates_cb) def __put_readstates_cb(self, success): self._sync_articles_get() def _sync_articles_get(self): #timestamp = self.db.get_setting(ptvDB.INT, 'article_sync_timestamp', int(time.time()) - (60 * 60 * 24)) # because clocks might be different, make it everything in the past day #timestamp -= 60 * 60 * 24 #self._article_sync.get_readstates_since(timestamp) hashlist = self.db.get_unread_hashes() logging.debug("getting readstates for %i entries" % (len(hashlist))) self._article_sync.get_readstates(hashlist) def __got_readstates_cb(self, o, viewlist): if self._exiting: #logging.debug("got readstates, but no time to apply them") return if len(viewlist) > 0: self.mark_entrylist_viewstate(viewlist, True) self.emit('entries-viewed', viewlist) #else: # logging.debug("stamping even though none found") #logging.debug("SETTING GCONF TIMESTAMP=========") self.db.set_setting(ptvDB.INT, 'article_sync_timestamp', int(time.time())) def __sent_readstates_cb(self, o): def __do(): self.db.set_setting(ptvDB.INT, 'article_sync_timestamp', int(time.time())) return False gobject.idle_add(__do) return False #def _submit_new_readstates(self): # def _submit_cb(result): # if result: # 
logging.debug("success submitting readstates!") # self.db.set_setting(ptvDB.INT, 'article_sync_timestamp', int(time.time())) # else: # logging.debug("trouble submitting readstates") # return False # # logging.debug("submitting new readstates, maybe") # if not self._article_sync.is_enabled(): # logging.debug("not enabled") # return True # if not self._article_sync.is_authenticated(): # logging.debug("not authenticated (trying again)") # self._article_sync.authenticate(lambda x: self._startup_article_sync(x, False)) # else: # logging.debug("going for it!") # timestamp = self.db.get_setting(ptvDB.INT, 'article_sync_timestamp', # int(time.time()) - (1 * 24 * 60 * 60)) #one day # self._article_sync.submit_readstates_since(timestamp, _submit_cb) # # return True def _import_default_feeds(self): found_subs = False for path in (os.path.join(utils.GetPrefix(), "share" ,"penguintv"), os.path.join(utils.GetPrefix(), "share"), os.path.join(utils.GetPrefix(),"share","sugar","activities","ptv","share"), os.path.join(os.path.split(os.path.split(utils.__file__)[0])[0],'share'), "/usr/share/penguintv"): try: if utils.HAS_PYXML: subs_name = "defaultsubs.opml" else: subs_name = "defaultsubs.txt" os.stat(os.path.join(path,subs_name)) found_subs = True break except: continue if not found_subs: logging.error("ptvdb: error finding default subscription file.") sys.exit() f = open(os.path.join(path,subs_name), "r") self.main_window.display_status_message(_("Polling feeds for the first time...")) self.import_subscriptions(f, utils.HAS_PYXML) def _connect_signals(self): #self._entry_list_view.connect('entry-selected', self.__entry_selected_cb) self.feed_list_view.connect('state-change', self.__feedlist_state_change_cb) #self._entry_view.connect('entry-selected', self.__entry_selected_cb) self._entry_view.connect('entries-viewed', self.__entries_viewed_cb) self._article_sync.connect('got-readstates', self.__got_readstates_cb) self._article_sync.connect('sent-readstates', self.__sent_readstates_cb) self.feed_list_view.set_entry_view(self._entry_view) self._entry_list_view.set_entry_view(self._entry_view) #def __entry_selected_cb(self, o, entry_id, feed_id): # if self._state == MANUAL_SEARCH or self._state == TAG_SEARCH and feed_id != -1: # self.select_feed(feed_id) # #FIXME: we're not passing the query for highlighting purposes here # if self.db.get_flags_for_feed(feed_id) & ptvDB.FF_MARKASREAD == 0: # #self.db.set_entry_read(entry_id, 1) # self._delayed_set_viewed(feed_id, [entry_id]) # #def __entries_selected_cb(self, o, feed_id, entrylist): # #self.mark_entrylist_viewstate(feed_id, entrylist, False) # self._delayed_set_viewed(feed_id, entrylist) def __entries_viewed_cb(self, o, viewlist): if self._exiting: return #logging.debug("got viewlist: %s" % str(viewlist)) self.mark_entrylist_viewstate(viewlist, True) def __entries_unviewed_cb(self, o, unviewedlist): if self._exiting: return self.mark_entrylist_viewstate(unviewedlist, False) def __feedlist_state_change_cb(self, o, new_state): self.set_state(new_state) @utils.db_except() def __online_status_changed(self, o, connected): self._net_connected = connected if not self._net_connected: if self._update_thread is not None: if self._update_thread.isAlive() and not self._update_thread.isDying(): updater, db = self._get_updater() db.interrupt_poll_multiple() if self.db: self.db.interrupt_poll_multiple() @utils.db_except() def _load_settings(self): val = self.db.get_setting(ptvDB.INT, '/apps/penguintv/feed_refresh_frequency', 60) self.polling_frequency = val*60*1000 
self.window_preferences.set_feed_refresh_frequency(self.polling_frequency/(60*1000)) val = self.db.get_setting(ptvDB.STRING, '/apps/penguintv/feed_refresh_method') if val is None: self.feed_refresh_method=REFRESH_AUTO else: if val == 'auto': self.feed_refresh_method=REFRESH_AUTO elif val == 'specified': self.feed_refresh_method=REFRESH_SPECIFIED elif val == 'never': self.feed_refresh_method=REFRESH_NEVER else: self.feed_refresh_method=REFRESH_AUTO self.window_preferences.set_feed_refresh_method(self.feed_refresh_method) if self.feed_refresh_method == REFRESH_AUTO: gobject.timeout_add(AUTO_REFRESH_FREQUENCY,self.do_poll_multiple, AUTO_REFRESH_FREQUENCY) elif self.feed_refresh_method == REFRESH_SPECIFIED: gobject.timeout_add(self.polling_frequency,self.do_poll_multiple, self.polling_frequency) val = self.db.get_setting(ptvDB.INT, '/apps/penguintv/bt_min_port', 6881) self._bt_settings['min_port']=val val = self.db.get_setting(ptvDB.INT, '/apps/penguintv/bt_max_port', 6999) self._bt_settings['max_port']=val val = self.db.get_setting(ptvDB.INT, '/apps/penguintv/bt_ul_limit', 0) self._bt_settings['ul_limit']=val self.window_preferences.set_bt_settings(self._bt_settings) self.mediamanager.set_bt_settings(self._bt_settings) val = self.db.get_setting(ptvDB.BOOL, '/apps/penguintv/auto_resume', True) self._autoresume = val self.window_preferences.set_auto_resume(val) val = self.db.get_setting(ptvDB.BOOL, '/apps/penguintv/poll_on_startup', True) self.poll_on_startup = val self.window_preferences.set_poll_on_startup(val) val = self.db.get_setting(ptvDB.BOOL, '/apps/penguintv/cache_images_locally', False) self.window_preferences.set_cache_images(val) val = self.db.get_setting(ptvDB.BOOL, '/apps/penguintv/auto_download', False) self._auto_download = val self.window_preferences.set_auto_download(val) val = self.db.get_setting(ptvDB.BOOL, '/apps/penguintv/show_notification_always', True) if utils.HAS_STATUS_ICON: self._status_icon.set_show_always(val) self.window_preferences.set_show_notification_always(val) val = self.db.get_setting(ptvDB.BOOL, '/apps/penguintv/auto_download_limiter', False) self._auto_download_limiter = val self.window_preferences.set_auto_download_limiter(val) if utils.RUNNING_SUGAR: default_max = 50*1024 else: default_max = 1024*1024 val = self.db.get_setting(ptvDB.INT, '/apps/penguintv/auto_download_limit', default_max) self._auto_download_limit = val self.window_preferences.set_auto_download_limit(val) val = self.mediamanager.get_media_dir() self.window_preferences.set_media_storage_location(val) val = self.db.get_setting(ptvDB.INT, '/apps/penguintv/media_storage_style', 0) self.window_preferences.set_media_storage_style(val) self.mediamanager.set_storage_style(val) @utils.db_except() def save_settings(self): if self.main_window.feed_pane is not None: self.db.set_setting(ptvDB.INT, '/apps/penguintv/feed_pane_position', self.main_window.feed_pane.get_position()) if self.main_window.entry_pane is not None: self.db.set_setting(ptvDB.INT, '/apps/penguintv/entry_pane_position', self.main_window.entry_pane.get_position()) if self.main_window.app_window is not None: if self.main_window.window_maximized == False: x,y=self.main_window.app_window.get_position() w,h=self.main_window.app_window.get_size() self.db.set_setting(ptvDB.INT, '/apps/penguintv/app_window_position_x',x) self.db.set_setting(ptvDB.INT, '/apps/penguintv/app_window_position_y',y) else: #grabbing the size when we are maximized is pointless, so just go by the old resized size w = self.db.get_setting(ptvDB.INT, 
'/apps/penguintv/app_window_size_x', 500) h = self.db.get_setting(ptvDB.INT, '/apps/penguintv/app_window_size_y', 500) w,h=(-abs(w),-abs(h)) self.db.set_setting(ptvDB.INT, '/apps/penguintv/app_window_size_x',w) self.db.set_setting(ptvDB.INT, '/apps/penguintv/app_window_size_y',h) self.db.set_setting(ptvDB.STRING, '/apps/penguintv/app_window_layout',self.main_window.layout) if self.feed_refresh_method==REFRESH_AUTO: self.db.set_setting(ptvDB.STRING, '/apps/penguintv/feed_refresh_method','auto') elif self.feed_refresh_method == REFRESH_SPECIFIED: self.db.set_setting(ptvDB.INT, '/apps/penguintv/feed_refresh_frequency',self.polling_frequency/(60*1000)) self.db.set_setting(ptvDB.STRING, '/apps/penguintv/feed_refresh_method','specified') else: self.db.set_setting(ptvDB.STRING, '/apps/penguintv/feed_refresh_method','never') self.db.set_setting(ptvDB.INT, '/apps/penguintv/bt_max_port',self._bt_settings['max_port']) self.db.set_setting(ptvDB.INT, '/apps/penguintv/bt_min_port',self._bt_settings['min_port']) self.db.set_setting(ptvDB.INT, '/apps/penguintv/bt_ul_limit',self._bt_settings['ul_limit']) self.db.set_setting(ptvDB.BOOL, '/apps/penguintv/auto_resume',self._autoresume) self.db.set_setting(ptvDB.BOOL, '/apps/penguintv/poll_on_startup',self.poll_on_startup) self.db.set_setting(ptvDB.BOOL, '/apps/penguintv/auto_download',self._auto_download) self.db.set_setting(ptvDB.BOOL, '/apps/penguintv/auto_download_limiter',self._auto_download_limiter) self.db.set_setting(ptvDB.INT, '/apps/penguintv/auto_download_limit',self._auto_download_limit) if self.feed_list_view.filter_setting > FeedList.NONE: self.db.set_setting(ptvDB.STRING, '/apps/penguintv/default_filter',self.feed_list_view.filter_name) else: self.db.set_setting(ptvDB.STRING, '/apps/penguintv/default_filter',"") if not self.main_window.changing_layout: val = self.feed_list_view.get_selected() if val is None: val = 0 self.db.set_setting(ptvDB.INT, '/apps/penguintv/selected_feed', val) val = self._entry_list_view.get_selected_id() if val is None: val = 0 self.db.set_setting(ptvDB.INT, '/apps/penguintv/selected_entry', val) media_dir = self.window_preferences.get_media_storage_location() if media_dir is not None: self.db.set_setting(ptvDB.STRING, '/apps/penguintv/media_storage_location', media_dir) self.db.set_setting(ptvDB.INT, '/apps/penguintv/media_storage_style', self.mediamanager.get_storage_style()) enabled = self.window_preferences.get_use_article_sync() self.db.set_setting(ptvDB.BOOL, '/apps/penguintv/use_article_sync', enabled) readonly = self.window_preferences.get_article_sync_readonly() self.db.set_setting(ptvDB.BOOL, '/apps/penguintv/sync_readonly', readonly) #self.db.set_setting(ptvDB.BOOL, '/apps/penguintv/use_internal_player', self.player.using_internal_player()) @utils.db_except() def resume_resumable(self): list = self.db.get_resumable_media() if list: gobject.idle_add(self._resumer_generator(list).next) return False #to cancel idler @utils.db_except() def _resumer_generator(self, list): for medium in list: self.mediamanager.download(medium['media_id'], False, True) #resume please self.db.set_entry_read(medium['entry_id'],False) self.feed_list_view.update_feed_list(medium['feed_id'],['icon']) yield True yield False def is_exiting(self): return self._exiting def do_quit(self): """save and shut down all our threads""" ##good breakpoint for gc analysis #import code #code.interact() logging.info('ptv quitting') self._exiting=1 self._entry_view.finish() self.feed_list_view.interrupt() if self._update_thread is not None: if 
self._update_thread.isAlive():
				self._update_thread.goAway()
		if self._article_sync.is_enabled():
			self.main_window.display_status_message(_("Synchronizing Articles"))
		self.main_window.finish()
		logging.info('stopping downloads')
		self.stop_downloads()
		logging.info('saving settings')
		self.save_settings()
		#if anything is downloading, report it as paused, because we pause all downloads on quit
		feed_cache = self.feed_list_view.get_feed_cache()
		feed_dict = {}
		for feed_id, flag, unread, total, pollfail, firstentrytitle in feed_cache:
			feed_dict[feed_id] = unread
		adjusted_cache = [[c[0],
			(c[1] & ptvDB.F_DOWNLOADING and c[1] - ptvDB.F_DOWNLOADING + ptvDB.F_PAUSED or c[1]),
			c[2], c[3], c[4], c[5]] for c in feed_cache]
		self.db.set_feed_cache(adjusted_cache)
		logging.info('stopping mediamanager')
		self.mediamanager.finish()
		self._article_sync.finish()

		def article_sync_wait(giveup):
			if time.time() > giveup:
				logging.warning("articlesync taking too long, quitting anyway")
			elif self._article_sync.is_working():
				return True
			#while threading.activeCount()>1:
			#	print threading.enumerate()
			#	print str(threading.activeCount())+" threads active..."
			#	time.sleep(1)
			logging.info('stopping db')
			self.db.finish(majorsearchwait=False)
			if not utils.RUNNING_SUGAR and not utils.RUNNING_HILDON:
				logging.info('main_quit')
				gtk.main_quit()
			self._exiting = 2
			return False

		toolong = time.time() + 120
		gobject.timeout_add(250, article_sync_wait, toolong)

	def is_quit_complete(self):
		return self._exiting > 1

	def write_feed_cache(self):
		self.db.set_feed_cache(self.feed_list_view.get_feed_cache())
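# --- Editor's note: illustrative sketch, not part of PenguinTV. -------------
# article_sync_wait() above shows the shutdown idiom this file uses: poll a
# worker with gobject.timeout_add() and pass the deadline in as an argument,
# so the timer stops itself by returning False.  A generic version (the names
# here are hypothetical):
import time
import gobject

def wait_until_done(is_busy, giveup):
	if time.time() > giveup:
		print "deadline passed, cleaning up anyway"
	elif is_busy():
		return True  #re-arm: check again on the next tick
	#...final cleanup would run here, busy or not...
	return False     #returning False cancels the timeout

#gobject.timeout_add(250, wait_until_done, lambda: False, time.time() + 120)
# -----------------------------------------------------------------------------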
	def do_poll_multiple(self, was_setup=None, arguments=0, feeds=None, message=None, local=False):
		""""was_setup": So do_poll_multiple is going to get called by timers
		and manually, and we needed some way of saying "I've got a new
		frequency, stop the old timer and start the new one."  So it checks
		that the frequency it 'was setup' with is the same as the current
		frequency.  If not, exit with False to stop the timer."""
		if self._state == MAJOR_DB_OPERATION or not self._app_loaded or self._exiting:
			return True
		if was_setup is not None:
			if self.feed_refresh_method == REFRESH_AUTO:
				pass
				#if was_setup==0: #initial poll
				#	arguments = arguments | ptvDB.A_ALL_FEEDS
			elif self.feed_refresh_method == REFRESH_SPECIFIED:
				if was_setup != self.polling_frequency and was_setup != 0:
					return False
			else:
				return False
		if self.feed_refresh_method == REFRESH_AUTO:
			arguments = arguments | ptvDB.A_AUTOTUNE
		if not self._net_connected:
			return True
		if self._polling_taskinfo != -1:
			if time.time() - self._polling_taskinfo > 20 * 60:
				#logging.debug("reset polling taskinfo 972")
				self._polling_taskinfo = -1
			else:
				return True
		if utils.RUNNING_HILDON and was_setup is not None:
			if self.mediamanager.get_download_count() > 0:
				logging.debug("delaying poll until downloads are complete")
		if message is None:
			self._poll_message = _("Polling Feeds...")
		else:
			self._poll_message = message
		self.main_window.update_progress_bar(0, MainWindow.U_POLL)
		self.main_window.display_status_message(self._poll_message, MainWindow.U_POLL)
		self._poll_new_entries = []
		if self._remote_poller is not None and not local:
			self._polling_thread = REMOTE
			logging.debug("Using remote poller")
			if feeds is None:
				try:
					self._remote_poller.poll_all(arguments, "FinishedCallback")
				except Exception, e:
					#don't reset poller, maybe it just timed out. _check_poller will find out for sure
					logging.debug("lost the poller, trying again with local poller (err: %s)" % str(e))
					return self.do_poll_multiple(was_setup, arguments, feeds, message, local=True)
			else:
				try:
					self._remote_poller.poll_multiple(arguments, feeds, "FinishedCallback")
				except:
					logging.debug("lost the poller, trying again with local poller (2)")
					return self.do_poll_multiple(was_setup, arguments, feeds, message, local=True)
			self._polling_taskinfo = int(time.time())
		else:
			self._polling_thread = LOCAL
			logging.debug("Polling in-process")
			updater, db = self._get_updater()
			task_id = updater.queue(db.poll_multiple, (arguments, feeds))
			if arguments & ptvDB.A_ALL_FEEDS == 0:
				self._gui_updater.queue(self.main_window.display_status_message,
					_("Feeds Updated"), task_id, False)
				#insane: queueing a timeout
				self._gui_updater.queue(gobject.timeout_add,
					(2000, self.main_window.display_status_message, ""), task_id, False)
			self._polling_taskinfo = self._gui_updater.queue(self.update_disk_usage,
				None, task_id, False)
			#because this is also waiting
			if self._auto_download == True:
				self._polling_taskinfo = self._gui_updater.queue(self._auto_download_unviewed,
					None, task_id)
		#gtk.gdk.threads_leave()
		if was_setup != 0:
			return True
		return False

	def poll_finished_cb(self, total):
		"""only called over dbus when poller.py finishes.  Keep this fast to
		prevent timeouts over dbus."""
		self._gui_updater.queue(self.update_disk_usage)
		if self._auto_download == True:
			self._gui_updater.queue(self._auto_download_unviewed)
		self._gui_updater.set_completed(self._polling_taskinfo)

	@utils.db_except()
	def _auto_download_unviewed(self):
		"""Automatically download any unviewed media.  Runs every five
		minutes when auto-polling, so make sure it stays fast."""
		if self._exiting:
			return
		download_list = self.db.get_media_for_download(False) #don't resume paused downloads
		if len(download_list) == 0:
			return #no need to bother
		logging.debug("files ready to download:")
		total_size = 0
		for d in download_list:
			title = self.db.get_feed_title(d[3])
			logging.debug("%s, %i: %i" % (title, d[2], d[1]))
			total_size = total_size + int(d[1])
		logging.info("adding up downloads, we need %i bytes" % (total_size))
		if self._free_media_space(total_size):
			for d in download_list:
				self.mediamanager.download(d[0])
				self.emit('entry-updated', d[2], d[3])
		else:
			logging.error("we were unable to free up enough space.")
			#print download_list
		self.update_disk_usage()
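# --- Editor's note: illustrative sketch, not part of PenguinTV. -------------
# do_poll_multiple() is re-armed with gobject.timeout_add() and receives the
# frequency it "was setup" with; when the stored frequency changes, the stale
# timer notices the mismatch and kills itself by returning False.  Distilled
# (names hypothetical):
import gobject

class Poller(object):
	def __init__(self):
		self.frequency = 60000  #ms

	def start(self):
		gobject.timeout_add(self.frequency, self.tick, self.frequency)

	def tick(self, was_setup):
		if was_setup != self.frequency:
			return False  #frequency changed; this stale timer stops itself
		#...do one poll...
		return True       #still current; fire again

	def set_frequency(self, freq):
		self.frequency = freq
		self.start()      #old timer dies on its next tick
# -----------------------------------------------------------------------------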
	@utils.db_except()
	def _free_media_space(self, size_needed):
		"""deletes media so that we have at least 'size_needed' bytes of free
		space.  Returns True if successful, returns False if not (ie, too big)"""
		disk_total = utils.get_disk_total(self.mediamanager.get_media_dir())
		disk_usage = self.mediamanager.get_disk_usage()
		disk_free = utils.get_disk_free(self.mediamanager.get_media_dir())
		#adjust actual free space so we never fill up the drive
		if utils.RUNNING_SUGAR or utils.RUNNING_HILDON:
			free_buffer = 300000000 # 300 meg
		else:
			free_buffer = 10000000 # 10 meg
		size_to_free = 0
		if self._auto_download_limiter:
			if self._auto_download_limit * 1024 - disk_usage < size_needed:
				size_to_free = size_needed - (self._auto_download_limit * 1024 - disk_usage)
		if disk_free + size_to_free < size_needed + free_buffer:
			size_to_free = size_needed + free_buffer - disk_free
		#if the disk isn't big enough, drop it like it's hot...
		if disk_total - free_buffer < size_needed:
			return False
		#if the media ain't big enough, pop it like it's hot...
		if disk_usage < size_to_free:
			return False
		if size_to_free == 0:
			return True
		#get the media that's currently in the player so we don't delete it
		if utils.HAS_GSTREAMER:
			media_in_player = self.player.get_queue()
			media_in_player = [m[4] for m in media_in_player]
		media_to_remove = []
		removed_size = 0
		for media_id, entry_id, feed_id, filename, date in self.db.get_deletable_media():
			if removed_size >= size_to_free:
				#we've deleted enough; re-check the accounting before declaring victory
				disk_usage = self.mediamanager.get_disk_usage()
				if self._auto_download_limiter:
					if self._auto_download_limit * 1024 - disk_usage < size_needed:
						logging.error("didn't free up the space like we thought1")
						return False
				if utils.get_disk_free(self.mediamanager.get_media_dir()) < size_needed + free_buffer:
					logging.error("didn't free up the space like we thought2"
						+ str(utils.get_disk_free(self.mediamanager.get_media_dir())))
					return False
				return True
			#don't remove anything that's queued in the player
			if utils.HAS_GSTREAMER:
				if media_id in media_in_player:
					continue
			try:
				size = os.stat(filename)[6]
				removed_size += size
				logging.info("removing: %s %i bytes for a total of %i" % (filename, size, removed_size))
				self.db.delete_media(media_id)
				self.db.set_entry_read(entry_id, True)
				self.emit('entry-updated', entry_id, feed_id)
			except OSError, e:
				logging.warning("Couldn't remove %s: %s" % (filename, str(e)))
				return False
		#ran out of deletable media without freeing enough space
		return False

	@utils.db_except()
	def add_search_tag(self, query, tag_name):
		self.db.add_search_tag(query, tag_name) #could raise ptvDB.TagAlreadyExists, let it go
		self._saved_search = self.main_window.search_entry.get_text()
		self.emit('tags-changed', 0)
		while gtk.events_pending(): #wait for the list to update
			gtk.main_iteration()
		index = self.main_window.get_filter_index(tag_name)
		if index is not None:
			self.main_window.search_entry.set_text("")
			self.main_window.set_active_filter(index)
		else:
			logging.warning("we just added a search tag but it's not in the list")

	@utils.db_except()
	def remove_search_tag(self, tag_name):
		self.db.remove_tag(tag_name)
		self.emit('tags-changed', 0)
		while gtk.events_pending():
			gtk.main_iteration()

	@utils.db_except()
	def change_search_tag(self, current_tag, new_tag=None, new_query=None):
		if new_tag is not None:
			self.db.rename_tag(current_tag, new_tag)
			self.main_window.rename_filter(current_tag, new_tag)
			current_tag = new_tag
		if new_query is not None:
			self.db.change_query_for_tag(current_tag, new_query)
			index = self.main_window.get_active_filter()[1]
			if self.main_window.get_active_filter()[0] == current_tag:
				self.set_state(TAG_SEARCH) #redundant, but good practice
				self._show_search(new_query, self._search(new_query))

	@utils.db_except()
	def apply_tags_to_feed(self, feed_id, old_tags, new_tags):
		"""take a list of tags and apply it to a feed"""
		old_set = set(old_tags)
		new_set = set(new_tags)
		removed_tags = list(old_set.difference(new_set))
		added_tags = list(new_set.difference(old_set))
		for tag in removed_tags:
			self.db.remove_tag_from_feed(feed_id, tag)
		for tag in added_tags:
			self.db.add_tag_for_feed(feed_id, tag)
		if removed_tags or added_tags:
			self.feed_list_view.set_selected(feed_id)
			self.emit('tags-changed', 0)
			self.feed_list_view.filter_all(False)
		#if old_tags is not None:
		#	if ptvDB.NOTIFYUPDATES in old_tags:
		#		self.emit('notify-tags-changed')
		#		return #don't need the next test
		#if new_tags is not None:
		#	if ptvDB.NOTIFYUPDATES in new_tags:
		#		self.emit('notify-tags-changed')

	def _populate_feeds(self, callback=None, subset=FeedList.ALL):
		self.set_state(MAJOR_DB_OPERATION)
		self.main_window.display_status_message(_("Loading Feeds..."))
		self.feed_list_view.populate_feeds(callback,
subset) def display_entry(self, entry_id, set_read=True, query=""): #FIXME: no more way to display a specific entry when download is # clicked #if entry_id is not None: # item = self.db.get_entry(entry_id) # media = self.db.get_entry_media(entry_id) # if media: # item['media']=media #else: # self._entry_view.display_item() # return # #if item.has_key('media') == False: # if item['read']==0 and set_read: # item['read'] = 1 # self.db.set_entry_read(entry_id,1) # #self._entry_list_view.mark_as_viewed(entry_id) # #self.feed_list_view.mark_entries_read(1, feed_id=item['feed_id']) # for f in self.db.get_pointer_feeds(item['feed_id']): # self.feed_list_view.update_feed_list(f,['readinfo','icon']) ##self._entry_view.display_item(item, query) pass def display_custom_entry(self, message): """Used by other classes so they don't need to know about EntryView""" self._entry_view.display_custom_entry(message) def undisplay_custom_entry(self): """Used by other classes so they don't need to know about EntryView""" self._entry_view.undisplay_custom_entry() @utils.db_except() def activate_link(self, link): """links can be basic hrefs, or they might be custom penguintv commands""" import urlparse try: parsed_url = urlparse.urlparse(link) except: logging.warning("Invalid link clicked: %s" % (link,)) return action=parsed_url[0] #protocol parameters=parsed_url[3] http_arguments=parsed_url[4] anchor = parsed_url[5] try: item=int(parsed_url[2]) except: pass if action == "keep": entry = self.db.get_entry(item) self.db.set_entry_keep(item, 1) self.emit('entry-updated', item, entry['feed_id']) elif action == "unkeep": entry = self.db.get_entry(item) self.db.set_entry_keep(item, 0) self.emit('entry-updated', item, entry['feed_id']) elif action == "download": self.mediamanager.unpause_downloads() self.mediamanager.download(item) entry_id = self.db.get_entryid_for_media(item) #self.db.set_media_viewed(item,False) feed_id = self.db.get_entry(entry_id)['feed_id'] self.emit('entry-updated', entry_id, feed_id) elif action=="resume" or action=="tryresume": self.do_resume_download(item) elif action=="play": if utils.RUNNING_SUGAR and not utils.HAS_GSTREAMER: dialog = gtk.Dialog(title=_("Enclosures Disabled"), parent=None, flags=gtk.DIALOG_MODAL, buttons=(gtk.STOCK_OK, gtk.RESPONSE_ACCEPT)) label = gtk.Label("Launching enclosed files is disabled on olpc until a mime system is developed. \n If you install GStreamer PenguinTV can use that. 
(email owen-olpc@ywwg.com for more info)") dialog.vbox.pack_start(label, True, True, 0) label.show() dialog.set_transient_for(self.main_window.get_parent()) response = dialog.run() dialog.hide() del dialog return media = self.db.get_media(item) entry = self.db.get_entry(media['entry_id']) feed_title = self.db.get_feed_title(entry['feed_id']) if utils.is_known_media(media['file']): self.player.play(media['file'], utils.my_quote(feed_title) + " " + utils.get_hyphen() + " " + entry['title'], media['media_id'], context=self._hildon_context) else: if HAS_GNOME: try: gnome.url_show(media['file']) except: webbrowser.open_new_tab(media['file']) else: webbrowser.open_new_tab(media['file']) if not entry['keep']: self.db.set_entry_read(media['entry_id'],True) self.emit('entry-updated', media['entry_id'], entry['feed_id']) elif action=="downloadqueue": self.mediamanager.unpause_downloads() self.mediamanager.download(item, True) self.db.set_media_viewed(item,False) entry_id = self.db.get_entryid_for_media(item) feed_id = self.db.get_entry(entry_id)['feed_id'] self.emit('entry-updated', entry_id, feed_id) elif action=="queue": logging.info(parsed_url) elif action=="stop": newitem={} newitem['media_id']=item newitem['entry_id']=self.db.get_entryid_for_media(newitem['media_id']) self.do_cancel_download(newitem) elif action=="pause": self.do_pause_download(item) elif action=="clear" or action=="cancel": newitem={} newitem['media_id']=item newitem['entry_id']=self.db.get_entryid_for_media(newitem['media_id']) self.do_cancel_download(newitem) elif action=="delete": self.delete_media(item) entry_id = self.db.get_entryid_for_media(item) feed_id = self.db.get_entry(entry_id)['feed_id'] self.emit('entry-updated', entry_id, feed_id) elif action=="reveal": if utils.is_kde(): reveal_url = "file:" + parsed_url[1] + parsed_url[2] os.system('konqueror --select ' + reveal_url + ' &') else: reveal_url = "file:"+os.path.split(parsed_url[1]+parsed_url[2])[0] reveal_url = reveal_url.replace("%20"," ") if HAS_GNOME: try: gnome.url_show(reveal_url) except: webbrowser.open_new_tab(reveal_url) else: webbrowser.open_new_tab(reveal_url) elif action == "http" or action == "https": try: if len(parameters)>0: parameters = ";"+parameters else: parameters = "" except: parameters = "" try: if len(http_arguments)>0: http_arguments = "?"+http_arguments else: http_arguments="" except TypeError: #"len() of unsized object" http_arguments="" try: if len(anchor)>0: anchor="#"+anchor else: anchor="" except: anchor="" quoted_url = urllib.quote(parsed_url[1]+parsed_url[2]) #however don't quote * (yahoo news don't like it quoted) quoted_url = string.replace(quoted_url,"%2A","*") quoted_url = string.replace(quoted_url,"%2B","+") uri = parsed_url[0]+"://"+quoted_url+parameters+http_arguments+anchor if utils.RUNNING_SUGAR: from sugar.activity import activityfactory activityfactory.create_with_uri('org.laptop.WebActivity', uri) elif utils.RUNNING_HILDON: import osso.rpc rpc_handler = osso.rpc.Rpc(self._hildon_context) logging.debug("Trying to launch maemo browser") rpc_handler.rpc_run_with_defaults('osso_browser', 'open_new_window', (uri,)) elif HAS_GNOME: try: gnome.url_show(uri) except: webbrowser.open_new_tab(uri) else: webbrowser.open_new_tab(uri) elif action=="file": logging.info(parsed_url[0]+"://"+urllib.quote(parsed_url[1]+parsed_url[2])) if HAS_GNOME: try: gnome.url_show(parsed_url[0]+"://"+urllib.quote(parsed_url[1]+parsed_url[2])) except: webbrowser.open_new_tab(parsed_url[0]+"://"+urllib.quote(parsed_url[1]+parsed_url[2])) else: 
webbrowser.open_new_tab(parsed_url[0]+"://"+urllib.quote(parsed_url[1]+parsed_url[2])) elif action=="pane": if parsed_url[2] == "back": self.main_window.pane_to_feeds() @utils.db_except() def download_entry(self, entry_id): entry = self.db.get_entry(entry_id) self.mediamanager.download_entry(entry_id) if entry['read']: self.emit('entries-unviewed', [(entry['feed_id'], (entry_id,))]) #FIXME: oh yeah, bit of a hack self.feed_list_view.update_feed_list(entry['feed_id'], update_what=['icon'], update_data={'icon':'gtk-execute'}) @utils.db_except() def download_unviewed(self): self.mediamanager.unpause_downloads() download_list=self.db.get_media_for_download() if len(download_list) > 0: for d in download_list: title = self.db.get_feed_title(d[3]) logging.info("%s, %i: %i" % (title, d[2], d[1])) total_size=0 if len(download_list)==0: dialog = gtk.Dialog(title=_("No Unviewed Media"), parent=None, flags=gtk.DIALOG_MODAL, buttons=(gtk.STOCK_OK, gtk.RESPONSE_ACCEPT)) label = gtk.Label(_("There is no unviewed media to download.")) dialog.vbox.pack_start(label, True, True, 0) label.show() dialog.set_transient_for(self.main_window.get_parent()) response = dialog.run() dialog.hide() del dialog return for d in download_list: total_size=total_size+int(d[1]) if total_size>100000000: #100 megs dialog = gtk.Dialog(title=_("Large Download"), parent=None, flags=gtk.DIALOG_MODAL, buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT, gtk.STOCK_OK, gtk.RESPONSE_ACCEPT)) label = gtk.Label(_("If PenguinTV downloads all of the unviewed media, \nit will take %(space)s. Do you wish to continue?") % {'space':utils.format_size(total_size)}) dialog.vbox.pack_start(label, True, True, 0) label.show() dialog.set_transient_for(self.main_window.get_parent()) response = dialog.run() dialog.hide() del dialog if response != gtk.RESPONSE_ACCEPT: return if self._free_media_space(total_size): gobject.idle_add(self._downloader_generator(download_list).next) else: dialog = gtk.Dialog(title=_("Not Enough Free Space"), parent=None, flags=gtk.DIALOG_MODAL, buttons=(gtk.STOCK_OK, gtk.RESPONSE_ACCEPT)) label = gtk.Label(_("PenguinTV was unable to free enough disk space to download %(space)s of media.") % {'space':utils.format_size(total_size)}) dialog.vbox.pack_start(label, True, True, 0) label.show() dialog.set_transient_for(self.main_window.get_parent()) response = dialog.run() dialog.hide() del dialog @utils.db_except() def _downloader_generator(self, download_list): for d in download_list: #gtk.gdk.threads_enter() self.mediamanager.download(d[0]) self.db.set_media_viewed(d[0],False) self.emit('entry-updated', d[2], d[3]) #gtk.gdk.threads_leave() yield True #gtk.gdk.threads_leave() yield False def export_opml(self): if utils.RUNNING_HILDON: dialog = hildon.FileChooserDialog(self.main_window.window, action=gtk.FILE_CHOOSER_ACTION_SAVE) else: dialog = gtk.FileChooserDialog(_('Select OPML...'),None, action=gtk.FILE_CHOOSER_ACTION_SAVE, buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_SAVE,gtk.RESPONSE_OK)) dialog.set_default_response(gtk.RESPONSE_OK) filter = gtk.FileFilter() filter.set_name("OPML files") filter.add_pattern("*.opml") dialog.add_filter(filter) filter = gtk.FileFilter() filter.set_name("All files") filter.add_pattern("*") dialog.add_filter(filter) dialog.set_current_name('mySubscriptions.opml') dialog.set_transient_for(self.main_window.get_parent()) response = dialog.run() if response == gtk.RESPONSE_OK: try: f = open(dialog.get_filename(), "w") self.main_window.display_status_message(_("Exporting Feeds...")) updater, db = 
self._get_updater()
				task_id = updater.queue(db.export_OPML, f)
				self._gui_updater.queue(self.main_window.display_status_message, "", task_id)
			except:
				pass
		elif response == gtk.RESPONSE_CANCEL:
			#print 'Closed, no files selected'
			pass
		dialog.destroy()

	@utils.db_except()
	def remove_feed(self, feed):
		#select entries and get all the media ids, and tell them all to cancel
		#in case they are downloading
		for entry in self.db.get_entrylist(feed):
			for medium in self.db.get_entry_media(entry[0]):
				if self.mediamanager.has_downloader(medium['media_id']):
					self.mediamanager.stop_download(medium['media_id'])
		self.db.delete_feed(feed)
		self.emit('feed-removed', feed)
		self.update_disk_usage()

	def poll_feeds(self, args=0):
		args = args | ptvDB.A_ALL_FEEDS
		self.do_poll_multiple(None, args)

	@utils.db_except()
	def import_subscriptions(self, f, opml=True):
		if self._state == MAJOR_DB_OPERATION or not self._app_loaded:
			self._for_import.append((1, f))
			return

		def import_gen(f):
			dialog = gtk.Dialog(title=_("Importing OPML file"), parent=None,
				flags=gtk.DIALOG_MODAL, buttons=None)
			label = gtk.Label(_("Loading the feeds from the OPML file"))
			dialog.vbox.pack_start(label, True, True, 0)
			label.show()
			bar = gtk.ProgressBar()
			dialog.vbox.pack_start(bar, True, True, 0)
			bar.show()
			dialog.set_transient_for(self.main_window.get_parent())
			dialog.show()
			gen = self.db.import_subscriptions(f, opml)
			newfeeds = []
			oldfeeds = []
			feed_count = -1.0
			i = 1.0
			for feed in gen: #status, value
				if feed_count == -1: #first yield is the total count
					feed_count = feed[1]
					continue
				if feed == (-1, 0): #either EOL or error on insert
					continue
				if self._exiting:
					dialog.hide()
					del dialog
					yield False
				#self.feed_list_view.add_feed(feed)
				if feed[0] == 1:
					newfeeds.append(feed[1])
				elif feed[0] == 0:
					oldfeeds.append(feed[1])
				bar.set_fraction(i / feed_count)
				i += 1.0
				yield True
			#if len(newfeeds)>10: #it's faster to just start over if we have a lot of feeds to add
			self.main_window.search_container.set_sensitive(False)
			self.feed_list_view.clear_list()
			self._populate_feeds(self.done_pop_with_poll)
			self.emit('tags-changed', 0)
			self.main_window.display_status_message("")
			task_id = self._gui_updater.queue(self.__finish_import, None, self._polling_taskinfo)
			dialog.hide()
			del dialog
			if len(newfeeds) == 1:
				self.feed_list_view.set_selected(newfeeds[0])
			elif len(oldfeeds) == 1:
				self.feed_list_view.set_selected(oldfeeds[0])
			yield False

		#schedule the import pseudo-threadidly
		gobject.idle_add(import_gen(f).next)

	def __finish_import(self):
		self.main_window.display_status_message("")
		self.feed_list_view.resize_columns()
		selected = self.feed_list_view.get_selected()

	@utils.db_except()
	def mark_entry_as_viewed(self, entry, feed_id=None): #, update_entrylist=True):
		if feed_id is None:
			feed_id = self.db.get_entry(entry)['feed_id']
		if self.db.get_flags_for_feed(feed_id) & ptvDB.FF_MARKASREAD == ptvDB.FF_MARKASREAD:
			return
		entry = self.db.get_entry(entry)
		if not entry['keep']:
			self.db.set_entry_read(entry['entry_id'], True)
			self.emit('entry-updated', entry['entry_id'], feed_id)

	@utils.db_except()
	def mark_entrylist_viewstate(self, viewlist, read=True):
		#print viewlist
		if len(viewlist) == 0:
			return
		entrylist = []
		for feed_id, id_list in viewlist:
			entrylist += id_list
		self.db.set_entrylist_read(entrylist, read)

	@utils.db_except()
	def mark_entry_as_unviewed(self, entry):
		media = self.db.get_entry_media(entry)
		self.db.set_entry_read(entry, 0)
		if media:
			for medium in media:
				self.db.set_media_viewed(medium['media_id'], False)
			#self.update_entry_list(entry)
		else:
			self.db.set_entry_read(entry, 0)
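# --- Editor's note: illustrative sketch, not part of PenguinTV. -------------
# import_subscriptions() above drives the real importer in ptvDB through a
# generator fed to gobject.idle_add().  For reference, pulling feed URLs out
# of a simple OPML file can be done with the standard library alone; this
# stand-alone helper is hypothetical:
from xml.etree import ElementTree

def opml_feed_urls(path):
	"""Return the xmlUrl of every <outline> element in an OPML file."""
	tree = ElementTree.parse(path)
	return [node.get('xmlUrl')
		for node in tree.getiterator('outline')
		if node.get('xmlUrl')]

#for url in opml_feed_urls('mySubscriptions.opml'):
#	print url
# -----------------------------------------------------------------------------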
#self.update_entry_list(entry) e = self.db.get_entry(entry) self.emit('entry-updated', entry, e['feed_id']) #self.feed_list_view.mark_entries_read(-1) @utils.db_except() def mark_feed_as_viewed(self,feed): changed = self.db.mark_feed_as_viewed(feed) self.emit('entries-viewed', [(feed, changed)]) #self._entry_list_view.populate_if_selected(feed) #self.feed_list_view.update_feed_list(feed, ['readinfo']) @utils.db_except() def mark_all_viewed(self): feedlist = self.db.get_feedlist() feedlist = [f[0] for f in feedlist] def _mark_viewed_gen(feedlist): self.main_window.display_status_message(_("Marking everything as viewed..."), MainWindow.U_LOADING) self.set_state(MAJOR_DB_OPERATION) i=-1.0 for f in feedlist: if self._exiting: break i+=1.0 self.mark_feed_as_viewed(f) self.main_window.update_progress_bar(i/len(feedlist), MainWindow.U_LOADING) yield True self._unset_state(True) #force exit of done_loading state self.set_state(DEFAULT) self.main_window.update_progress_bar(-1) self.main_window.display_status_message("") yield False gobject.idle_add(_mark_viewed_gen(feedlist).next) @utils.db_except() def play_entry(self,entry_id): media = self.db.get_entry_media(entry_id) entry = self.db.get_entry(entry_id) feed_title = self.db.get_feed_title(entry['feed_id']) if not entry['keep']: self.db.set_entry_read(entry_id, True) filelist=[] if media: for medium in media: filelist.append([medium['file'], utils.my_quote(feed_title) + " " + utils.get_hyphen() + " " + entry['title'], medium['media_id']]) if not entry['keep']: self.db.set_media_viewed(medium['media_id'],True) self.player.play_list(filelist, context=self._hildon_context) self.emit('entry-updated', entry_id, entry['feed_id']) @utils.db_except() def play_unviewed(self): playlist = self.db.get_unplayed_media(True) #set viewed playlist.reverse() self.player.play_list([ [item[3], utils.my_quote(item[5]) + " " + utils.get_hyphen() + " " + item[4], item[0]] for item in playlist], context=self._hildon_context) for row in playlist: self.feed_list_view.update_feed_list(row[2],['readinfo']) def _on_item_not_supported(self, player, filename, name, userdata): # I thought this would be called from main thread, but apparently not gtk.gdk.threads_enter() dialog = gtk.Dialog(title=_("Can't Play File"), parent=None, flags=gtk.DIALOG_MODAL, buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT, gtk.STOCK_OPEN, gtk.RESPONSE_ACCEPT)) label = gtk.Label("PenguinTV can not play this file. 
\nWould you like to try opening it in the default system player?") dialog.vbox.pack_start(label, True, True, 0) label.show() dialog.set_transient_for(self.main_window.get_parent()) response = dialog.run() dialog.hide() if response == gtk.RESPONSE_ACCEPT: self.player.play(filename, name, userdata, force_external=True, context=self._hildon_context) #retry, force external player dialog.destroy() gtk.gdk.threads_leave() def _on_gst_tick(self, player): if utils.RUNNING_HILDON: gst_player = self.main_window.get_gst_player() if gst_player is not None: if gst_player.has_video(): osso.DeviceState(self._hildon_context).display_blanking_pause() @utils.db_except() def refresh_feed(self, feed): if not self._net_connected: return info = self.db.get_feed_info(feed) updater, db = self._get_updater() def _refresh_cb(update_data, success): self._threaded_emit('feed-polled', feed, update_data) if update_data.has_key('new_entryids'): self._gui_updater.queue(self._article_sync.get_readstates_for_entries, update_data['new_entryids']) if update_data.has_key('mod_entryids'): if self._article_sync.is_enabled(): #if len(update_data['mod_entryids']) > 0: # logging.debug("entries have been modified, resubmitting: %s" % str(update_data['mod_entryids'])) for e_id in update_data['mod_entryids']: self._gui_updater.queue(self._article_sync.diff_entry, (e_id, feed)) if info['lastpoll'] == 0 and success: self._mark_all_media_but_first(feed, db=db) self.main_window.display_status_message(_("Polling Feed...")) self._poll_new_entries = [] task_id = updater.queue(db.poll_feed_trap_errors,(feed, _refresh_cb)) def _unset_state(self, authorize=False): """gets app ready to display new state by unloading current state. Also checks if we are loading feeds, in which case state can not change. To unset loading_feeds, we take a "manual override" argument""" #bring state back to default if self._state != MANUAL_SEARCH: #save filter for later self._saved_filter = self.main_window.get_active_filter()[1] if self._state == MAJOR_DB_OPERATION: if not authorize: raise CantChangeState,"can't interrupt feed loading" else: self._state = DONE_MAJOR_DB_OPERATION #we don't know what the new state will be... 
				return
		if self._state == DEFAULT:
			return

	def set_state(self, new_state, data=None):
		self.emit('state-changed', new_state, data)

	@utils.db_except()
	def _state_changed_cb(self, app, new_state, data):
		if self._state == new_state:
			return
		if new_state == DEFAULT and self._state == MAJOR_DB_OPERATION:
			logging.error("possible programming error: must unset major_db_op manually (self._unset_state(True))")
			#self.main_window.set_state(new_state, data)
			return #do nothing
		self._unset_state()
		if new_state == MANUAL_SEARCH:
			assert utils.HAS_SEARCH
		elif new_state == TAG_SEARCH:
			assert utils.HAS_SEARCH
		elif new_state == MAJOR_DB_OPERATION:
			pass
		if self._state == MANUAL_SEARCH and new_state == DEFAULT and data != True:
			self._saved_search = self.main_window.search_entry.get_text()
			selected = self.feed_list_view.get_selected()
			if selected is not None:
				name = self.main_window.get_filter_name(self._saved_filter)
				if name not in self.db.get_tags_for_feed(selected):
					self.main_window.set_active_filter(FeedList.ALL)
				else:
					self.main_window.set_active_filter(self._saved_filter)
			else:
				self.main_window.set_active_filter(self._saved_filter)
		self._state = new_state

	@utils.db_except()
	def _search(self, query, blacklist=None):
		try:
			query = query.replace("!", "")
			result = self.db.search(query, blacklist=blacklist)
			logging.debug("search results: %i, %i" % (len(result[0]), len(result[1])))
		except Exception, e:
			logging.warning("Error with that search term: " + str(query) + str(e))
			result = ([], [])
		return result

	@utils.db_except()
	def _show_search(self, query, result):
		if self._state != MANUAL_SEARCH and self._state != TAG_SEARCH:
			logging.warning("incorrect state, aborting" + str(self._state))
			return
		try:
			self._entry_list_view.show_search_results(result[1], query)
			self.feed_list_view.show_search_results(result[0])
		except ptvDB.BadSearchResults, e:
			logging.warning(str(e))
			self.db.reindex(result[0], [i[0] for i in result[1]])
			self._show_search(query, self._search(query))
			return

	def _update_search(self):
		self._search(self._saved_search)

	def threaded_search(self, query):
		if query != "":
			if self._threaded_searcher is None:
				self._threaded_searcher = PenguinTVApp._threaded_searcher(query,
					self.__got_search, self._searcher_done)
			self._threaded_searcher.set_query(query)
			if not self._waiting_for_search:
				self._waiting_for_search = True
				self._threaded_searcher.start()

	def __got_search(self, query, results):
		self._gui_updater.queue(self._got_search, (query, results))

	def _searcher_done(self):
		self._waiting_for_search = False

	def _got_search(self, query, results):
		self.set_state(MANUAL_SEARCH)
		self._show_search(query, results)

	if utils.HAS_LUCENE:
		import PyLucene
		threadclass = PyLucene.PythonThread
	else:
		threadclass = threading.Thread

	class _threaded_searcher(threadclass):
		def __init__(self, query, callback, done_callback):
			PenguinTVApp.threadclass.__init__(self)
			self.query = query
			self.callback = callback
			self.done_callback = done_callback
			self.db = ptvDB.ptvDB()

		def set_query(self, query):
			self.query = query.replace("!", "")

		def run(self):
			old_query = self.query + "different"
			waits = 0
			while waits < 3:
				if self.query == old_query:
					#we get .2*3 seconds to wait for more characters
					waits += 1
				else:
					waits = 0
					try:
						old_query = self.query
						self.callback(self.query, self.db.search(self.query))
					except:
						self.callback(self.query, ([], []))
				time.sleep(.2) #give signals a chance to get around
			self.done_callback()

	def manual_search(self, query):
		#self._saved_search = query #even if it's blank
		if len(query) == 0:
			self.set_state(DEFAULT)
			return
		self.set_state(MANUAL_SEARCH)
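# --- Editor's note: illustrative sketch, not part of PenguinTV. -------------
# _threaded_searcher above debounces as-you-type queries: the worker keeps
# spinning while the query keeps changing and only exits after the text has
# been stable for three 0.2s polls.  The core idea, reduced to a skeleton
# (names hypothetical):
import time
import threading

class Debouncer(threading.Thread):
	def __init__(self, fire):
		threading.Thread.__init__(self)
		self.value = ""
		self.fire = fire

	def run(self):
		last = None
		stable = 0
		while stable < 3:
			if self.value == last:
				stable += 1   #unchanged; count toward quiescence
			else:
				stable = 0    #changed; reset the counter and fire
				last = self.value
				self.fire(last)
			time.sleep(0.2)
# -----------------------------------------------------------------------------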
self._show_search(query, self._search(query)) def entrylist_selecting_right_now(self): return self._entry_list_view.presently_selecting def select_feed(self, feed_id): self.feed_list_view.set_selected(feed_id) @utils.db_except() def select_entry(self, entry_id): feed_id = self.db.get_entry(entry_id)['feed_id'] self.select_feed(feed_id) #FIXME: doesn't display entry because list isn't populated yet self.display_entry(entry_id) self.main_window.notebook_select_page(0) @utils.db_except() def change_filter(self, current_filter, tag_type): filter_id = self.main_window.get_active_filter()[1] if utils.HAS_SEARCH and filter_id == FeedList.SEARCH: self._show_search(self._saved_search, self._search(self._saved_search)) if self._threaded_searcher: if not self._waiting_for_search: self.main_window.search_entry.set_text(self._saved_search) else: if tag_type == ptvDB.T_SEARCH: self.set_state(TAG_SEARCH) query = self.db.get_search_tag(current_filter) self._show_search(query, self._search(query)) else: self.set_state(DEFAULT, True) #gonna filter! self.main_window.feed_list_view.set_filter(filter_id, current_filter) def show_downloads(self): self.mediamanager.generate_playlist() self.mediamanager.show_downloads() @utils.db_except() def stop_downloads(self): """stops downloading everything -- really just pauses them. Just sets a flag, really. progress_callback does the actual work""" if self.mediamanager.pause_state == MediaManager.RUNNING: download_stopper_thread = threading.Thread(None, self.mediamanager.stop_all_downloads) download_stopper_thread.start() #this isn't gonna block any more! self.db.pause_all_downloads() #blocks, but prevents race conditions @utils.db_except() def pause_downloads(self): if self.mediamanager.pause_state == MediaManager.RUNNING: download_pauser_thread = threading.Thread(None, self.mediamanager.pause_all_downloads) download_pauser_thread.start() #this isn't gonna block any more! 
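# --- Editor's note: illustrative sketch, not part of PenguinTV. -------------
# stop_downloads()/pause_downloads() above push the slow media-manager call
# onto a worker thread so the GUI never blocks, then make the quick
# order-sensitive DB call inline.  The split in isolation (names hypothetical):
import threading

def pause_everything(slow_pause, quick_flag):
	#slow: run off the main thread so the UI stays responsive
	threading.Thread(None, slow_pause).start()
	#fast but race-prone if deferred: run inline, in order
	quick_flag()
# -----------------------------------------------------------------------------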
self.db.pause_all_downloads() #blocks, but prevents race conditions @utils.db_except() def change_layout(self, layout): if self.main_window.layout != layout: self.set_state(DEFAULT) val = self.feed_list_view.get_selected() if val is None: val = 0 self.db.set_setting(ptvDB.INT, '/apps/penguintv/selected_feed', val) val = self._entry_list_view.get_selected_id() if val is None: val = 0 self.db.set_setting(ptvDB.INT, '/apps/penguintv/selected_entry', val) self.feed_list_view.interrupt() self.feed_list_view.set_selected(None) self.feed_list_view.finalize() self._entry_list_view.finalize() self._entry_view.finish() while gtk.events_pending(): #make sure everything gets shown gtk.main_iteration() gc.collect() self.main_window.activate_layout(layout) self.feed_list_view = self.main_window.feed_list_view self._entry_list_view = self.main_window.entry_list_view self._entry_view = self.main_window.entry_view self._connect_signals() self.main_window.changing_layout = False self._populate_feeds(self._done_populating) self.update_disk_usage() #val = self.db.get_setting(ptvDB.INT, '/apps/penguintv/selected_feed', 0) #if val > 0: # self.feed_list_view.set_selected(val) val = self.db.get_setting(ptvDB.INT, '/apps/penguintv/selected_entry', 0) if val > 0: #self._entry_list_view.set_selected(val) self.select_entry(val) def on_window_changing_layout_delete_event(self, widget, event): self.main_window.changing_layout = False return widget.hide_on_delete() def _gconf_set_bt_maxport(self, client, *args, **kwargs): maxport = client.get_int('/apps/penguintv/bt_max_port') self.set_bt_maxport(maxport) self.window_preferences.set_bt_settings(self._bt_settings) def set_bt_maxport(self, maxport): self._bt_settings['max_port']=maxport def _gconf_set_bt_minport(self, client, *args, **kwargs): minport = client.get_int('/apps/penguintv/bt_min_port') self.set_bt_minport(minport) self.window_preferences.set_bt_settings(self._bt_settings) def set_bt_minport(self, minport): self._bt_settings['min_port']=minport def _gconf_set_bt_ullimit(self, client, *args, **kwargs): ullimit = client.get_int('/apps/penguintv/bt_ul_limit') self.set_bt_ullimit(ullimit) self.window_preferences.set_bt_settings(self._bt_settings) def set_bt_ullimit(self, ullimit): self._bt_settings['ul_limit']=ullimit def _gconf_set_polling_frequency(self, client, *args, **kwargs): freq = client.get_int('/apps/penguintv/feed_refresh_frequency') self.set_polling_frequency(freq) def set_polling_frequency(self, freq): if self.polling_frequency != freq*60*1000: self.polling_frequency = freq*60*1000 gobject.timeout_add(self.polling_frequency,self.do_poll_multiple, self.polling_frequency) self.window_preferences.set_feed_refresh_frequency(freq) def _gconf_set_media_storage_location(self, client, *args, **kwargs): val = client.get_string('/apps/penguintv/media_storage_location') self.set_media_storage_location(val) def set_media_storage_location(self, location): #try: old_dir, remap_dir = self.mediamanager.set_media_dir(location) #except: # self.window_preferences.alert_bad_storage_location(location) # return if remap_dir is not None: self.db.relocate_media(old_dir, remap_dir) gst_player = self.main_window.get_gst_player() if gst_player is not None: gst_player.relocate_media(old_dir, remap_dir) self.window_preferences.set_media_storage_location(location) def _gconf_set_media_storage_style(self, client, *args, **kwargs): val = client.get_int('/apps/penguintv/media_storage_style') self.set_media_storage_style(val) def set_media_storage_style(self, style): 
self.mediamanager.set_storage_style(style, True) def get_feed_refresh_method(self): return self.feed_refresh_method @utils.db_except() def _gconf_set_feed_refresh_method(self, client, *args, **kwargs): refresh = self.db.get_setting(ptvDB.STRING, '/apps/penguintv/feed_refresh_method', 'auto') self.set_feed_refresh_method(refresh, client) @utils.db_except() def set_feed_refresh_method(self, refresh, client=None): if refresh == 'auto': self.feed_refresh_method=REFRESH_AUTO self.polling_frequency = AUTO_REFRESH_FREQUENCY gobject.timeout_add(self.polling_frequency,self.do_poll_multiple, self.polling_frequency) elif refresh == 'specified': self.feed_refresh_method=REFRESH_SPECIFIED if utils.HAS_GCONF: self._gconf_set_polling_frequency(client,None,None) else: self.set_polling_frequency(self.db.get_setting(ptvDB.INT, '/apps/penguintv/feed_refresh_frequency', 5)) else: self.feed_refresh_method=REFRESH_NEVER @utils.db_except() def add_feed(self, url, title, tags=[]): """Inserts the url and starts the polling process""" #FIXME: if we add feed while doing a major db operation like mark all, #FIXME: this won't work if self._state == MAJOR_DB_OPERATION or not self._app_loaded: self._for_import.append((0, url, title)) return self.main_window.display_status_message(_("Trying to poll feed...")) feed_id = -1 try: feed_id = self.db.insertURL(url, title) if len(tags) > 0 and not utils.RUNNING_SUGAR: for tag in tags: self.db.add_tag_for_feed(feed_id, tag) self.feed_list_view.add_feed(feed_id) #let signals take care of this??? self.main_window.select_feed(feed_id) if len(tags) > 0 and not utils.RUNNING_SUGAR: self.emit('tags-changed', 0) updater, db = self._get_updater() self._poll_new_entries = [] updater.queue(db.poll_feed_trap_errors, (feed_id,self._db_add_feed_cb)) except ptvDB.FeedAlreadyExists, e: self.main_window.select_feed(e.feed) self.main_window.hide_window_add_feed() return feed_id def _db_add_feed_cb(self, feed, success): self._threaded_emit('feed-polled', feed['feed_id'], feed) self._threaded_emit('feed-added', feed['feed_id'], success) def __feed_added_cb(self, app, feed_id, success): if success: self._mark_all_media_but_first(feed_id) @utils.db_except() def _mark_all_media_but_first(self, feed_id, db=None): """mark all media read except first one. 
called when we first add a feed""" if db is None: db = self.db all_feeds_list = db.get_media_for_download() this_feed_list = [item[2] for item in all_feeds_list if item[3] == feed_id] db.set_entrylist_read(this_feed_list[1:], True) self.mark_entrylist_viewstate([(feed_id, this_feed_list[1:])], True) self.emit('entries-viewed', [(feed_id, this_feed_list[1:])]) if self._auto_download: self._auto_download_unviewed() @utils.db_except() def add_feed_filter(self, pointed_feed_id, filter_name, query): try: feed_id = self.db.add_feed_filter(pointed_feed_id, filter_name, query) except ptvDB.FeedAlreadyExists, f: self.main_window.select_feed(f) return self.feed_list_view.add_feed(feed_id) self.main_window.select_feed(feed_id) @utils.db_except() def set_feed_filter(self, pointer_feed_id, filter_name, query): self.db.set_feed_filter(pointer_feed_id, filter_name, query) #FIXME: should emit a signal so that planetview updates its title too self.feed_list_view.update_feed_list(pointer_feed_id,['title'],{'title':filter_name}) self.feed_list_view.resize_columns() @utils.db_except() def delete_entry_media(self, entry_id): """Delete all media for an entry""" medialist = self.db.get_entry_media(entry_id) if medialist: for medium in medialist: if medium['download_status']==ptvDB.D_DOWNLOADED or medium['download_status']==ptvDB.D_RESUMABLE: self.delete_media(medium['media_id']) feed_id = self.db.get_entry(entry_id)['feed_id'] self.emit('entry-updated', entry_id, feed_id) self.update_disk_usage() @utils.db_except() def delete_media(self, media_id, update_ui=True, entry_id=None): """Deletes specific media id""" self.db.delete_media(media_id) self.mediamanager.generate_playlist() self.player.unqueue(media_id) self.db.set_media_viewed(media_id,True, entry_id) if update_ui: self.main_window.update_downloads() self.update_disk_usage() m = self.db.get_media(media_id) self.emit('entry-updated', m['entry_id'], m['feed_id']) def delete_feed_media(self, feed_id): """Deletes media for an entire feed. Calls generator _delete_media_generator""" gobject.idle_add(self._delete_media_generator(feed_id).next) @utils.db_except() def _delete_media_generator(self, feed_id): entrylist = self.db.get_entrylist(feed_id) if entrylist: for entry in entrylist: #gtk.gdk.threads_enter() medialist = self.db.get_entry_media(entry[0]) if medialist: for medium in medialist: if medium['download_status']==ptvDB.D_DOWNLOADED or medium['download_status']==ptvDB.D_RESUMABLE: self.delete_media(medium['media_id'], False) self._entry_view.update_if_selected(entry[0], feed_id) #gtk.gdk.threads_leave() yield True #gtk.gdk.threads_enter() self.update_entry_list() self.mediamanager.generate_playlist() self.update_disk_usage() else: pass #gtk.gdk.threads_enter() self.feed_list_view.update_feed_list(feed_id, ['readinfo','icon']) #gtk.gdk.threads_leave() yield False @utils.db_except() def do_cancel_download(self, item): """cancels a download and cleans up. Right now there's redundancy because we call this twice for files that are downloading -- once when we ask it to stop downloading, and again when the callback tells the thread to stop working. 
how to make this better?""" d = None try: d = self.mediamanager.get_downloader(item['media_id']) self.mediamanager.stop_download(item['media_id']) except Exception, e: pass #download may not be active anymore, but that's ok self.db.set_media_download_status(item['media_id'],ptvDB.D_NOT_DOWNLOADED) self.delete_media(item['media_id'], False, item['entry_id']) #marks as viewed if self._exiting: self.feed_list_view.filter_all() #to remove active downloads from the list return self.feed_list_view.filter_all() #to remove active downloads from the list if d is not None: self.emit('download-finished', d) else: feed_id = self.db.get_entry(item['entry_id'])['feed_id'] self.emit('entry-updated', item['entry_id'], feed_id) @utils.db_except() def do_pause_download(self, media_id): self.mediamanager.get_downloader(media_id).pause() self.db.set_media_download_status(media_id,ptvDB.D_RESUMABLE) self.db.set_media_viewed(media_id,0) self.db.set_entry_read(media_id,0) @utils.db_except() def do_resume_download(self, media_id): self.mediamanager.unpause_downloads() self.mediamanager.download(media_id, False, True) #resume please self.db.set_media_viewed(media_id,False) entry_id = self.db.get_entryid_for_media(media_id) feed_id = self.db.get_entry(entry_id)['feed_id'] self.emit('entry-updated', entry_id, feed_id) @utils.db_except() def _download_finished(self, d): """Process the data from a callback for a downloaded file""" self.update_disk_usage() if d.status==Downloader.FAILURE: self.db.set_media_download_status(d.media['media_id'],ptvDB.D_ERROR,d.media['errormsg']) elif d.status==Downloader.STOPPED or d.status==Downloader.PAUSED: pass elif d.status==Downloader.FINISHED or d.status==Downloader.FINISHED_AND_PLAY: if os.stat(d.media['file'])[6] < int(d.media['size']/2) and os.path.isfile(d.media['file']): #don't check dirs self.db.set_entry_read(d.media['entry_id'],False) self.db.set_media_viewed(d.media['media_id'],False) self.db.set_media_download_status(d.media['media_id'],ptvDB.D_DOWNLOADED,_("File did not download completely")) d.status = Downloader.FAILURE else: if d.status==Downloader.FINISHED_AND_PLAY: entry = self.db.get_entry(d.media['entry_id']) if not entry['keep']: self.db.set_entry_read(d.media['entry_id'],True) self.db.set_media_viewed(d.media['media_id'], True) entry = self.db.get_entry(d.media['entry_id']) feed_title = self.db.get_feed_title(entry['feed_id']) self.player.play(d.media['file'], utils.my_quote(feed_title) + " " + utils.get_hyphen() + " " + entry['title'], d.media['media_id'], context=self._hildon_context) else: entry = self.db.get_entry(d.media['entry_id']) if not entry['keep']: self.db.set_entry_read(d.media['entry_id'],False) self.db.set_media_viewed(d.media['media_id'],False) self.db.set_media_download_status(d.media['media_id'],ptvDB.D_DOWNLOADED) self.emit('download-finished', d) if self._exiting: self.feed_list_view.filter_all() #to remove active downloads from the list return self.emit('entry-updated', d.media['entry_id'], d.media['feed_id']) self.feed_list_view.filter_all() #to remove active downloads from the list @utils.db_except() def rename_feed(self, feed_id, name): oldname = self.db.get_feed_title(feed_id) if len(name)==0: self.db.set_feed_name(feed_id, None) #gets the title the feed came with name = self.db.get_feed_title(feed_id) else: self.db.set_feed_name(feed_id, name) self.emit('feed-name-changed', feed_id, oldname, name) def _gconf_set_auto_resume(self, client, *args, **kwargs): autoresume = client.get_bool('/apps/penguintv/auto_resume') 
self.set_auto_resume(autoresume) def set_auto_resume(self, autoresume): self.window_preferences.set_auto_resume(autoresume) self._autoresume = autoresume def _gconf_set_poll_on_startup(self, client, *args, **kwargs): poll_on_startup = client.get_bool('/apps/penguintv/poll_on_startup') self.set_poll_on_startup(poll_on_startup) self.window_preferences.set_poll_on_startup(poll_on_startup) def set_poll_on_startup(self, poll_on_startup): self.poll_on_startup = poll_on_startup def _gconf_set_cache_images(self, client, *args, **kwargs): cache_images = client.get_bool('/apps/penguintv/cache_images_locally') self.set_cache_images(cache_images) self.window_preferences.set_cache_images(cache_images) def set_cache_images(self, cache_images): self.db.set_cache_images(cache_images) def _gconf_set_auto_download(self, client, *args, **kwargs): auto_download = client.get_bool('/apps/penguintv/auto_download') self.set_auto_download(auto_download) self.window_preferences.set_auto_download(auto_download) def set_auto_download(self, auto_download): self._auto_download = auto_download def _gconf_set_show_notification_always(self, client, *args, **kwargs): show_notification_always = client.get_bool('/apps/penguintv/show_notification_always') self.window_preferences.set_show_notification_always(show_notification_always) if utils.HAS_STATUS_ICON: self._status_icon.set_show_always(show_notification_always) def set_show_notification_always(self, show_notification_always): if utils.HAS_STATUS_ICON: self._status_icon.set_show_always(show_notification_always) def _gconf_set_show_notifications(self, client, *args, **kwargs): show_notifications = client.get_bool('/apps/penguintv/show_notifications') self.emit('setting-changed', ptvDB.BOOL, '/apps/penguintv/show_notifications', show_notifications) def _gconf_set_auto_download_limiter(self, client, *args, **kwargs): auto_download_limiter = client.get_bool('/apps/penguintv/auto_download_limiter') self.set_auto_download_limiter(auto_download_limiter) self.window_preferences.set_auto_download_limiter(auto_download_limiter) def set_auto_download_limiter(self, auto_download_limiter): self._auto_download_limiter = auto_download_limiter def _gconf_set_auto_download_limit(self, client, *args, **kwargs): auto_download_limit = client.get_int('/apps/penguintv/auto_download_limit') self.set_auto_download_limit(auto_download_limit) self.window_preferences.set_auto_download_limit(auto_download_limit) def set_auto_download_limit(self, auto_download_limit): self._auto_download_limit = auto_download_limit @utils.db_except() def _gconf_set_app_window_layout(self, client, *args, **kwargs): layout = self.db.get_setting(ptvDB.STRING, '/apps/penguintv/app_window_layout', 'standard') self.set_app_window_layout(layout) def set_app_window_layout(self, layout): self.main_window.layout=layout def _gconf_set_use_article_sync(self, client, *args, **kwargs): enabled = self.db.get_setting(ptvDB.BOOL, '/apps/penguintv/use_article_sync', False) if not utils.ENABLE_ARTICLESYNC: enabled = False self.set_use_article_sync(enabled) self.window_preferences.set_use_article_sync(enabled) if enabled: if self._state != MAJOR_DB_OPERATION: if not self._article_sync.is_authenticated(): self.sync_authenticate() else: self.window_preferences.set_sync_status(_("Not Logged In")) def set_use_article_sync(self, enabled): self._article_sync.set_enabled(enabled) #def _gconf_set_sync_username(self, client, *args, **kwargs): # username = self.db.get_setting(ptvDB.STRING, '/apps/penguintv/sync_username', "") # 
#	self.set_sync_username(username)
#
#def set_sync_username(self, username):
#	pass
#	#self._article_sync.set_username(username)
#	#self.window_preferences.set_sync_username(username)
#
#def _gconf_set_sync_password(self, client, *args, **kwargs):
#	password = self.db.get_setting(ptvDB.STRING, '/apps/penguintv/sync_password', "")
#	#self.set_sync_password(password)
#
#def set_sync_password(self, password):
#	pass
#	#self._article_sync.set_password(password)
#	#self.window_preferences.set_sync_password(password)

	def _gconf_set_sync_readonly(self, client, *args, **kwargs):
		readonly = self.db.get_setting(ptvDB.BOOL, '/apps/penguintv/sync_readonly', False)
		self.set_article_sync_readonly(readonly)

	def set_article_sync_readonly(self, readonly):
		self._article_sync.set_readonly(readonly)

	#def update_feed_list(self, feed_id=None):
	#	self.feed_list_view.update_feed_list(feed_id) #for now, just update this ONLY

	def update_entry_list(self, entry_id=None):
		self._entry_list_view.update_entry_list(entry_id)

	def update_disk_usage(self):
		size = self.mediamanager.get_disk_usage()
		self.main_window.update_disk_usage(size)

	def _sensitize_search(self):
		self._gui_updater.queue(self.main_window._sensitize_search)

	def _done_populating(self):
		self._gui_updater.queue(self.done_populating)

	def _done_populating_dont_sensitize(self):
		self._gui_updater.queue(self.done_populating, False)

	def done_populating(self, sensitize=True):
		self._unset_state(True) #force exit of done_loading state
		self.set_state(DEFAULT) #redundant
		if self._article_sync.is_enabled():
			self.sync_authenticate()
		if sensitize:
			self.main_window._sensitize_search()
		self._spawn_poller()
		gobject.timeout_add(2 * 60 * 1000, self._check_poller)
		if not self._firstrun and self.poll_on_startup:
			#don't poll on startup on firstrun, we take care of that
			gobject.timeout_add(30 * 1000, self.do_poll_multiple, 0)
		if not self.__importing:
			self.__importing = True
			for item in self._for_import:
				if item[0] == 0: #url
					typ, url, title = item
					self.add_feed(url, title)
				elif item[0] == 1: #opml
					typ, f = item
					try:
						self.import_subscriptions(f)
					except Exception, e:
						logging.error("Exception importing opml file:" + str(e))
			self._for_import = []
			self.__importing = False

	def done_pop_with_poll(self):
		self.done_populating()
		self.do_poll_multiple()

	def get_database_name(self):
		return os.path.join(utils.get_home(), "penguintv4.db")

	def toggle_net_connection(self):
		self.emit('online-status-changed', not self._net_connected)

	def _nm_device_now_active(self, *args):
		if self._nm_interface is not None:
			state = self._nm_interface.state()
			if state == 3: #NetworkManager reports 3 for "connected"
				self.maybe_change_online_status(True)
			else:
				self.maybe_change_online_status(False)

	def _nm_device_no_longer_active(self, *args):
		if self._nm_interface is not None:
			state = self._nm_interface.state()
			if state == 3:
				self.maybe_change_online_status(True)
			else:
				self.maybe_change_online_status(False)

	def maybe_change_online_status(self, new_status):
		if new_status != self._net_connected:
			if new_status:
				self._article_sync.authenticate()
			else:
				self._article_sync.disconnected()
			self.emit('online-status-changed', new_status)

	@utils.db_except()
	def _progress_callback(self, d):
		"""Callback for downloads.
Not in main thread, so shouldn't generate gtk calls""" if self._exiting == 1: return 1 #returning one is what interrupts the download if d.media.has_key('size_adjustment'): if d.media['size_adjustment']==True: updater, db = self._get_updater() updater.queue(db.set_media_size,(d.media['media_id'], d.media['size'])) if self.main_window.changing_layout == False: #self._gui_updater.queue(self._entry_view.update_if_selected,(d.media['entry_id'],d.media['feed_id'])) self._gui_updater.queue(self._entry_view.progress_update,(d.media['entry_id'],d.media['feed_id'])) self._gui_updater.queue(self.main_window.update_download_progress) def _finished_callback(self,downloader): self._gui_updater.queue(self._download_finished, downloader) def polling_callback(self, args, cancelled=False): if not self._exiting: feed_id, update_data, total = args if len(update_data)>0: if (update_data['pollfail'] and not self._net_connected) or \ update_data.has_key('ioerror'): logging.warning("ioerror polling reset") updater, db = self._get_updater() db.interrupt_poll_multiple() self._polled = 0 #logging.debug("reset polling taskinfo 2468") self._polling_taskinfo = -1 self._poll_message = "" if not utils.RUNNING_HILDON: self._gui_updater.queue(self._article_sync.get_readstates_for_entries, self._poll_new_entries) self.main_window.update_progress_bar(-1, MainWindow.U_POLL) self.main_window.display_status_message(_("Trouble connecting to the internet"),MainWindow.U_POLL) gobject.timeout_add(2000, self.main_window.display_status_message,"") return else: update_data['polling_multiple'] = True if update_data.has_key('new_entryids'): if utils.RUNNING_HILDON: self._gui_updater.queue(self._article_sync.get_readstates_for_entries, update_data['new_entryids']) else: self._poll_new_entries += update_data['new_entryids'] if update_data.has_key('mod_entryids'): if self._article_sync.is_enabled(): #if len(update_data['mod_entryids']) > 0: # logging.debug("entries have been modified, resubmitting: %s" % str(update_data['mod_entryids'])) for e_id in update_data['mod_entryids']: self._gui_updater.queue(self._article_sync.diff_entry, (e_id, feed_id)) self._threaded_emit('feed-polled', feed_id, update_data) if update_data.has_key('first_poll'): if update_data['first_poll']: self._gui_updater.queue(self._mark_all_media_but_first, feed_id) if self._polling_thread == LOCAL: updater, db = self._get_updater() elif self._polling_thread == REMOTE: db = self.db if db.get_flags_for_feed(feed_id) & ptvDB.FF_DOWNLOADSINGLE == ptvDB.FF_DOWNLOADSINGLE: self._gui_updater.queue(self._mark_all_media_but_first, feed_id) elif not cancelled and feed_id != -1: #check image just in case self._gui_updater.queue(self.feed_list_view.update_feed_list, (feed_id,['image'])) self._gui_updater.queue(self._poll_update_progress, (total, cancelled)) def _poll_update_progress(self, total=0, cancelled=False): """Updates progress for do_poll_multiple, and also displays the "done" message""" self._polled += 1 if self._polled >= total or cancelled: #logging.debug("done polling multiple 1, updating readstates") if not utils.RUNNING_HILDON: self._article_sync.get_readstates_for_entries(self._poll_new_entries) self.main_window.update_progress_bar(-1,MainWindow.U_POLL) self.main_window.display_status_message(_("Feeds Updated"),MainWindow.U_POLL) self._polled = 0 #logging.debug("reset polling taskinfo 2508") self._polling_taskinfo = -1 self._poll_message = "" gobject.timeout_add(2000, self.main_window.display_status_message,"") else: d = { 'polled':self._polled, 'total':total} 
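# --- Editor's note: illustrative sketch, not part of PenguinTV. -------------
# The progress message below is built with named %-interpolation so a
# translated format string can reorder its fields freely:
d_example = {'polled': 3, 'total': 12}
print "(%(polled)d/%(total)d)" % d_example  # -> (3/12)
# -----------------------------------------------------------------------------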
self.main_window.update_progress_bar(float(self._polled)/float(total),MainWindow.U_POLL) self.main_window.display_status_message(self._poll_message + " (%(polled)d/%(total)d)" % d,MainWindow.U_POLL) def _entry_image_download_callback(self, entry_id, html): self._gui_updater.queue(self._entry_view._images_loaded,(entry_id, html)) def _emit_change_setting(self, typ, datum, value): self.emit('setting-changed', typ, datum, value) def _threaded_emit(self, signal, *args): def do_emit(signal, *args): gtk.gdk.threads_enter() self.emit(signal, *args) gtk.gdk.threads_leave() return False gobject.idle_add(do_emit, signal, *args, **{"priority" : gobject.PRIORITY_HIGH}) def _get_updater(self): """if the updater thread is not running, or we never started one, delete and restart it. Otherwise return the current values""" if self._update_thread is not None: if self._update_thread.isAlive() and not self._update_thread.isDying(): updater = self._update_thread.get_updater() updater_thread_db = self._update_thread.get_db() if updater_thread_db is None: logging.error("updater db is none: 1") return (updater, updater_thread_db) else: del self._update_thread self._update_thread = self.DBUpdaterThread(self.polling_callback) self._update_thread.start() updater_thread_db = None updater = None while True: #this may race, so be patient updater = self._update_thread.get_updater() updater_thread_db = self._update_thread.get_db() if updater_thread_db is not None and updater is not None: break time.sleep(.05) if updater_thread_db is None: logging.error("updater db is none: 2") return (updater, updater_thread_db) class DBUpdaterThread(threadclass): def __init__(self, polling_callback): PenguinTVApp.threadclass.__init__(self) self.__isDying = False self.db = None self.updater = UpdateTasksManager.UpdateTasksManager(UpdateTasksManager.MANUAL, "db updater") self.threadSleepTime = 1.0 self.threadDieTime = 30.0 self.polling_callback = polling_callback self._db_lock = threading.Lock() self._restart_db = False def run(self): """ Until told to quit, retrieve the next task and execute it, calling the callback if any. """ if self.db == None: self._db_lock.acquire() self._start_db() self._db_lock.release() born_t = time.time() while self.__isDying == False: if self._restart_db: logging.debug("We were told to restart the database") if self.db is not None: logging.debug("and yet db is not none. what gives?") self._restart_db = False self._db_lock.acquire() self._start_db() self._db_lock.release() while self.updater.updater_gen().next(): #do we also need to check for db restarting here? 
if self.updater.exception is not None: if isinstance(self.updater.exception, OperationalError): self._db_lock.acquire() logging.warning("detected a database lock error, restarting threaded db") self.db._db.close() del self.db self._start_db() self._db_lock.release() if time.time() - born_t > self.threadDieTime: self.__isDying = True time.sleep(self.threadSleepTime) if self.db is not None: self.db.finish(False) self.db = None def _start_db(self): #traceback.print_stack() self.db = ptvDB.ptvDB(self.polling_callback) def get_db(self): #doesn't work, not run in thread self._db_lock.acquire() self._db_lock.release() if self.db is None: for i in range(1,10): if self.db is not None: return self.db time.sleep(0.2) logging.warning("db not found, starting a new one") self._restart_db = True for i in range(1,15): if self.db is not None: break time.sleep(0.2) if self.db is None: logging.error("problem restarting the database") else: logging.warning("DB restarted successfully") return self.db def get_updater(self): return self.updater def goAway(self): """ Exit the run loop next time through.""" logging.debug("got goAway signal, shutting down update thread") self.__isDying = True if self.db is not None: self.db.finish(vacuumok=False, correctthread=False) self.db = None def isDying(self): return self.__isDying class CantChangeState(Exception): def __init__(self,m): self.m = m def __str__(self): return self.m class AlreadyRunning(Exception): def __init__(self, remote_app): self.remote_app = remote_app def usage(): print "penguintv command line options:" print " -o [filename] Import an OPML file" print " -u [filename] Add an RSS url" print " --play Tell the media player to play" print " --pause Tell the media player to pause" print " --playpause Toggle between playing and pausing" print " --prev Go to the previous track" print " --next Go to the next track" print " [filename] (alternate) Import an RSS url" print " -h | --help This explanation" def do_commandline(remote_app=None, local_app=None): assert remote_app is not None or local_app is not None try: opts, args = getopt.getopt(sys.argv[1:], "ho:u:", ["help","play","pause","prev","next","playpause","playlist"]) except getopt.GetoptError: # print help information and exit: usage() sys.exit(2) #ignore --playlist, that is already taken care of before if len(opts) > 0: for o, a in opts: if o in ('-h', '--help'): usage() sys.exit(0) elif o == '-o': if local_app is None: remote_app.ImportOpml(a) else: local_app.import_subscriptions(a) elif o == '-u': if local_app is None: remote_app.AddFeed(a) else: if utils.RUNNING_SUGAR: local_app.sugar_add_button.popup() else: local_app.main_window.show_window_add_feed(False) local_app.main_window.set_window_add_feed_location(a) elif o == '--play': if remote_app is not None: remote_app.Play() elif o == '--pause': if remote_app is not None: remote_app.Pause() elif o == '--next': if remote_app is not None: remote_app.Next() elif o == '--prev': if remote_app is not None: remote_app.Prev() elif o == '--playpause': if remote_app is not None: remote_app.PlayPause() if len(opts) == 0 and len(sys.argv) > 1: url = sys.argv[1] if local_app is None: remote_app.AddFeed(url) else: if utils.RUNNING_SUGAR: local_app.sugar_add_button.popup() else: local_app.main_window.show_window_add_feed(False) local_app.main_window.set_window_add_feed_location(url) if len(opts) == 0 and len(sys.argv) == 1 and local_app is None: usage() def do_quit(event, app): app.do_quit() setup_success = True def setup_database(): global setup_success try: home = 
utils.get_home() os.stat(os.path.join(home,"penguintv4.db")) db=sqlite.connect(os.path.join(home,"penguintv4.db"), timeout=10) db.isolation_level="DEFERRED" c = db.cursor() c.execute(u'SELECT rowid FROM feeds LIMIT 1') c.execute(u'SELECT value FROM settings WHERE data="db_ver"') db_ver = c.fetchone() if db_ver is None: db_ver = 0 else: db_ver = int(db_ver[0]) latest_ver = ptvDB.LATEST_DB_VER #print "got without object:",db_ver, latest_ver c.close() db.close() return True except: logging.debug("Didn't get sqlite the easy way, trying the hard way") try: db = ptvDB.ptvDB() db_ver, latest_ver = db.get_version_info() db.finish(vacuumok=False) except Exception, e: logging.error("Couldn't open database: %s", str(e)) return False def upgrade_db(dialog, cb): try: logging.info(_("Upgrading Database")) db = ptvDB.ptvDB() db.maybe_initialize_db() db.finish(vacuumok=False) logging.info("Done upgrading database") cb(True, dialog) except Exception, e: cb(False, dialog, str(e)) def upgrade_done(success, dialog, errormsg=None): global setup_success setup_success = success if errormsg is not None: logging.error("Problem upgrading DB: %s" % errormsg) dialog.destroy() gtk.main_quit() def pulse(progressbar): progressbar.pulse() return True def destroy(widget, data): gtk.main_quit() logging.info("Our database version: %i" % db_ver) logging.info("Program version: %i" % latest_ver) if db_ver == latest_ver: pass elif db_ver == -1: pass elif db_ver > latest_ver: logging.error("""The database you are running is from a later version of PenguinTV than the version you are currently running. Please upgrade back to the latest version of PenguinTV. To avoid errors and corruption, PenguinTV will quit now.""") dialog = gtk.Dialog(title=_("Database Version Mismatch"), parent=None, flags=gtk.DIALOG_MODAL, buttons=(gtk.STOCK_QUIT, gtk.RESPONSE_ACCEPT)) frame = gtk.Frame() title = gtk.Label() title.set_use_markup(True) title.set_markup(_("Database Version Mismatch")) frame.set_label_widget(title) hbox = gtk.HBox() hbox.set_spacing(12) image = gtk.image_new_from_stock(gtk.STOCK_DIALOG_ERROR, gtk.ICON_SIZE_DIALOG) hbox.pack_start(image, False, False, 12) label = gtk.Label(_("""The database you are running is from a later version of PenguinTV than the version you are currently running. To avoid errors and corruption, PenguinTV will quit now. Please upgrade back to the latest version of PenguinTV.""")) label.set_line_wrap(True) hbox.pack_start(label, True, True, 12) frame.add(hbox) frame.set_border_width(12) dialog.vbox.pack_start(frame, True, True, 0) label.show() dialog.resize(400,200) response = dialog.show_all() dialog.connect("response", destroy) dialog.connect("destroy", destroy) gtk.main() return False elif db_ver < latest_ver: logging.info("""The PenguinTV database is being upgraded. This may take a few minutes.""") dialog = gtk.Dialog(title=_("Upgrading Database"), parent=None, flags=gtk.DIALOG_MODAL) frame = gtk.Frame() title = gtk.Label() title.set_use_markup(True) title.set_markup(_("Upgrading Database")) frame.set_label_widget(title) vbox = gtk.VBox() vbox.set_spacing(12) hbox = gtk.HBox() hbox.set_spacing(12) image = gtk.image_new_from_stock(gtk.STOCK_EXECUTE, gtk.ICON_SIZE_DIALOG) hbox.pack_start(image, False, False, 12) label = gtk.Label(_("""The PenguinTV database is being upgraded. 
This may take a few minutes.""")) label.set_line_wrap(True) hbox.pack_start(label, True, True, 12) vbox.pack_start(hbox, True, True, 12) progressbar = gtk.ProgressBar() progressbar.set_pulse_step(0.05) vbox.pack_start(progressbar, False, False, 12) frame.add(vbox) frame.set_border_width(12) dialog.vbox.add(frame) dialog.resize(400,200) dialog.show_all() gobject.timeout_add(50, pulse, progressbar) t = threading.Thread(None, upgrade_db, args=(dialog, upgrade_done)) t.start() gtk.main() return setup_success return True if __name__ == '__main__': # Here starts the dynamic part of the program if utils.HAS_MOZILLA: if not os.path.exists('/usr/lib/xulrunner-1.9'): if not os.environ.has_key('MOZILLA_FIVE_HOME'): print """MOZILLA_FIVE_HOME not set. Please set before running Penguintv to prevent crashes.""" sys.exit(1) gtk.gdk.threads_init() if HAS_DBUS: bus = dbus.SessionBus() dubus = bus.get_object('org.freedesktop.DBus', '/org/freedesktop/dbus') dubus_methods = dbus.Interface(dubus, 'org.freedesktop.DBus') if dubus_methods.NameHasOwner('com.ywwg.PenguinTV'): remote_object = bus.get_object("com.ywwg.PenguinTV", "/PtvApp") remote_app = dbus.Interface(remote_object, "com.ywwg.PenguinTV.AppInterface") if remote_app.GetDatabaseName() == os.path.join(utils.get_home(), "penguintv4.db"): do_commandline(remote_app=remote_app) sys.exit(0) if not setup_database(): logging.error("Error initializing database") sys.exit(1) if HAS_GNOME: logging.info("Have GNOME") gtk.window_set_auto_startup_notification(True) gnome.init("PenguinTV", utils.VERSION) playlist = None try: opts, args = getopt.getopt(sys.argv[1:], "p:", ["playlist="]) except getopt.GetoptError, e: pass if len(opts) > 0: print opts for o, a in opts: if o in ('-p', '--playlist'): playlist = a try: app = PenguinTVApp(playlist=playlist) # Instancing of the GUI except AlreadyRunning, e: logging.info("PenguinTV is already running, why didn't we catch it?") do_commandline(remote_app=e.remote_app) sys.exit(0) app.main_window.Show() #import psyco ##psyco.log("/home/owen/Desktop/psyco.log") ##psyco.profile() #psyco.full() if utils.is_kde(): try: from kdecore import KApplication, KCmdLineArgs, KAboutData description = "test kde" version = "1.0" aboutData = KAboutData ("", "",\ version, description, KAboutData.License_GPL,\ "(C) 2004-2008 Owen Williams") KCmdLineArgs.init (sys.argv, aboutData) app = KApplication () except: logging.error("Unable to initialize KDE") sys.exit(1) elif utils.RUNNING_HILDON: #no gnome, no gnomeapp logging.debug("Starting Hildon version") gtk.window_set_auto_startup_notification(True) app = PenguinTVApp() app.main_window.Show() else: logging.debug("No gnome") window = gtk.Window() app = PenguinTVApp() app.main_window.Show(window) window.connect('delete-event', do_quit, app) do_commandline(local_app=app) ##PROFILE #import cProfile #cProfile.run('gtk.main()', '/tmp/penguintv-prof') #sys.exit(0) gtk.main() PenguinTV-4.2.0/penguintv/ajax/0000755000000000000000000000000011450514774013234 5ustar PenguinTV-4.2.0/penguintv/ajax/MyTCPServer.py0000644000000000000000000000260311175704073015727 0ustar import SocketServer import random import logging class MyTCPServer(SocketServer.ForkingTCPServer): def __init__(self, server_address, RequestHandlerClass, store_location): SocketServer.ForkingTCPServer.__init__(self, server_address, RequestHandlerClass) self._key = "" self.generate_key() self._updates = [] self._quitting = False self.store_location = store_location def serve_forever(self): while 1: try: self.handle_request() except Exception, e: 
logging.error("Error in Ajax Server: %s" % str(e)) continue if self._quitting: logging.info('quitting tcp server') return if len(self._updates)>0: #We must have posted an update. So pop it (unlike in the request handler, #changes actually have an effect here!) self._updates.pop(0) def finish(self): self._quitting = True def generate_key(self): self._key = str(random.randint(1,1000000)) return self._key def get_key(self): return self._key def push_update(self, update): remove_list = [u for u in self._updates if u.split(" ")[0] == update.split(" ")[0]] for item in remove_list: self._updates.remove(item) self._updates.append(update) def peek_update(self): return self._updates[0] def peek_all(self): return "\n".join(self._updates) def clear_updates(self): self._updates = [] def update_count(self): return len(self._updates) PenguinTV-4.2.0/penguintv/ajax/__init__.py0000644000000000000000000000000010646750250015330 0ustar PenguinTV-4.2.0/penguintv/ajax/EntryInfoServer.py0000644000000000000000000000375311074444523016716 0ustar import os import logging import gtk import utils import SimpleHTTPServer import SimpleImageCache class EntryInfoServer(SimpleHTTPServer.SimpleHTTPRequestHandler): """This class is recreated on every GET call. So things changed in this scope don't stick""" _image_cache = SimpleImageCache.SimpleImageCache() def __init__(self, request, client_address, server): self._server = server SimpleHTTPServer.SimpleHTTPRequestHandler.__init__(self, request, client_address, server) def do_GET(self): p = self.path[1:] #strip leading / splitted = p.split("/") key = "" command = "update" arg = "" if len(splitted) >= 1: key = splitted[0] if len(splitted) == 0 or key != self.server.get_key(): self.wfile.write("PenguinTV Unauthorized") return if len(splitted) >= 2: command = splitted[1] if len(splitted) >= 3: arg = splitted[2] if command == "update": if self.server.update_count()==0: self.wfile.write("") else: update = self.server.peek_update() self.wfile.write(update) elif command == "icon": theme = gtk.icon_theme_get_default() iconinfo = theme.lookup_icon(arg, 16, gtk.ICON_LOOKUP_NO_SVG) if iconinfo is not None: image_data = self._image_cache.get_image_from_file(iconinfo.get_filename()) self.wfile.write(image_data) else: logging.error("no icon found for: %s" % (arg,)) self.wfile.write("") elif command == "pixmaps": image_data = self._image_cache.get_image_from_file(utils.get_image_path(arg)) self.wfile.write(image_data) elif command == "cache": #strip out possible ../../../ crap. I think this is all I need? 
securitize = [s for s in splitted if s not in ("/",".","..")] filename = os.path.join(self._server.store_location, *securitize[2:]) if utils.RUNNING_HILDON: #not enough ram to cache it f = open(filename, "rb") image_data = f.read() f.close() else: image_data = self._image_cache.get_image_from_file(filename) self.wfile.write(image_data) PenguinTV-4.2.0/penguintv/FeedFilterDialog.py0000644000000000000000000000333410646750251016015 0ustar # Written by Owen Williams # see LICENSE for license information import penguintv import gtk class FeedFilterDialog: def __init__(self,xml,app): self._xml = xml self._app = app self._window = xml.get_widget("window_feed_filter") self._window.set_transient_for(self._app.main_window.get_parent()) for key in dir(self.__class__): if key[:3] == 'on_': self._xml.signal_connect(key, getattr(self,key)) self._filter_name_entry = self._xml.get_widget("filter_name_entry") self._query_entry = self._xml.get_widget("query_entry") self._pointed_feed_label = self._xml.get_widget("pointed_feed_label") self._pointed_feed_id = -1 def show(self): self._filter_name_entry.grab_focus() self._filter_name_entry.set_text("") self._query_entry.set_text("") self._pointed_feed_label.set_text("") self._window.show() def _finish(self): self._app.add_feed_filter(self._pointed_feed_id, self._filter_name_entry.get_text(), self._query_entry.get_text()) self.hide() def on_window_feed_filter_delete_event(self, widget, event): return self._window.hide_on_delete() def hide(self): self._filter_name_entry.set_text("") self._query_entry.set_text("") self._pointed_feed_label.set_text("") self._window.hide() def set_pointed_feed(self, feed_id, name): self._pointed_feed_label.set_text(name) self._pointed_feed_id = feed_id def set_filter_name(self, name): self._filter_name_entry.set_text(name) def on_add_button_clicked(self,event): self._finish() def on_filter_name_entry_activate(self, event): self._query_entry.grab_focus() self.hide() def on_query_entry_activate(self, event): self._finish() def on_cancel_button_clicked(self,event): self.hide() PenguinTV-4.2.0/penguintv/BeautifulSoup/0000755000000000000000000000000011450514774015100 5ustar PenguinTV-4.2.0/penguintv/BeautifulSoup/__init__.py0000644000000000000000000000000011070742140017163 0ustar PenguinTV-4.2.0/penguintv/BeautifulSoup/BeautifulSoup.py0000644000000000000000000023004711070742101020230 0ustar """Beautiful Soup Elixir and Tonic "The Screen-Scraper's Friend" http://www.crummy.com/software/BeautifulSoup/ Beautiful Soup parses a (possibly invalid) XML or HTML document into a tree representation. It provides methods and Pythonic idioms that make it easy to navigate, search, and modify the tree. A well-formed XML/HTML document yields a well-formed data structure. An ill-formed XML/HTML document yields a correspondingly ill-formed data structure. If your document is only locally well-formed, you can use this library to find and process the well-formed part of it. Beautiful Soup works with Python 2.2 and up. It has no external dependencies, but you'll have more success at converting data to UTF-8 if you also install these three packages: * chardet, for auto-detecting character encodings http://chardet.feedparser.org/ * cjkcodecs and iconv_codec, which add more encodings to the ones supported by stock Python. http://cjkpython.i18n.org/ Beautiful Soup defines classes for two main parsing strategies: * BeautifulStoneSoup, for parsing XML, SGML, or your domain-specific language that kind of looks like XML. 
* BeautifulSoup, for parsing run-of-the-mill HTML code, be it valid or invalid. This class has web browser-like heuristics for obtaining a sensible parse tree in the face of common HTML errors. Beautiful Soup also defines a class (UnicodeDammit) for autodetecting the encoding of an HTML or XML document, and converting it to Unicode. Much of this code is taken from Mark Pilgrim's Universal Feed Parser. For more than you ever wanted to know about Beautiful Soup, see the documentation: http://www.crummy.com/software/BeautifulSoup/documentation.html Here, have some legalese: Copyright (c) 2004-2008, Leonard Richardson All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the the Beautiful Soup Consortium and All Night Kosher Bakery nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE, DAMMIT. """ from __future__ import generators __author__ = "Leonard Richardson (leonardr@segfault.org)" __version__ = "3.0.7a" __copyright__ = "Copyright (c) 2004-2008 Leonard Richardson" __license__ = "New-style BSD" from sgmllib import SGMLParser, SGMLParseError import codecs import markupbase import types import re import sgmllib try: from htmlentitydefs import name2codepoint except ImportError: name2codepoint = {} try: set except NameError: from sets import Set as set #These hacks make Beautiful Soup able to parse XML with namespaces sgmllib.tagfind = re.compile('[a-zA-Z][-_.:a-zA-Z0-9]*') markupbase._declname_match = re.compile(r'[a-zA-Z][-_.:a-zA-Z0-9]*\s*').match DEFAULT_OUTPUT_ENCODING = "utf-8" # First, the classes that represent markup elements. 
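# A minimal usage sketch of the classes defined below (the sample markup is
# illustrative only; BeautifulSoup, find, findAll and prettify are all
# defined later in this file):
#
#   from BeautifulSoup import BeautifulSoup
#   soup = BeautifulSoup("<p>Para1<p>Para2")  # second <p> implicitly closes the first
#   print soup.prettify()                     # re-serialize the repaired parse tree
#   first = soup.find('p')                    # first matching Tag, or None
#   every = soup.findAll('p')                 # ResultSet of all matching Tags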
class PageElement: """Contains the navigational information for some part of the page (either a tag or a piece of text)""" def setup(self, parent=None, previous=None): """Sets up the initial relations between this element and other elements.""" self.parent = parent self.previous = previous self.next = None self.previousSibling = None self.nextSibling = None if self.parent and self.parent.contents: self.previousSibling = self.parent.contents[-1] self.previousSibling.nextSibling = self def replaceWith(self, replaceWith): oldParent = self.parent myIndex = self.parent.contents.index(self) if hasattr(replaceWith, 'parent') and replaceWith.parent == self.parent: # We're replacing this element with one of its siblings. index = self.parent.contents.index(replaceWith) if index and index < myIndex: # Furthermore, it comes before this element. That # means that when we extract it, the index of this # element will change. myIndex = myIndex - 1 self.extract() oldParent.insert(myIndex, replaceWith) def extract(self): """Destructively rips this element out of the tree.""" if self.parent: try: self.parent.contents.remove(self) except ValueError: pass #Find the two elements that would be next to each other if #this element (and any children) hadn't been parsed. Connect #the two. lastChild = self._lastRecursiveChild() nextElement = lastChild.next if self.previous: self.previous.next = nextElement if nextElement: nextElement.previous = self.previous self.previous = None lastChild.next = None self.parent = None if self.previousSibling: self.previousSibling.nextSibling = self.nextSibling if self.nextSibling: self.nextSibling.previousSibling = self.previousSibling self.previousSibling = self.nextSibling = None return self def _lastRecursiveChild(self): "Finds the last element beneath this object to be parsed." lastChild = self while hasattr(lastChild, 'contents') and lastChild.contents: lastChild = lastChild.contents[-1] return lastChild def insert(self, position, newChild): if (isinstance(newChild, basestring) or isinstance(newChild, unicode)) \ and not isinstance(newChild, NavigableString): newChild = NavigableString(newChild) position = min(position, len(self.contents)) if hasattr(newChild, 'parent') and newChild.parent != None: # We're 'inserting' an element that's already one # of this object's children. if newChild.parent == self: index = self.find(newChild) if index and index < position: # Furthermore we're moving it further down the # list of this object's children. That means that # when we extract this element, our target index # will jump down one. position = position - 1 newChild.extract() newChild.parent = self previousChild = None if position == 0: newChild.previousSibling = None newChild.previous = self else: previousChild = self.contents[position-1] newChild.previousSibling = previousChild newChild.previousSibling.nextSibling = newChild newChild.previous = previousChild._lastRecursiveChild() if newChild.previous: newChild.previous.next = newChild newChildsLastElement = newChild._lastRecursiveChild() if position >= len(self.contents): newChild.nextSibling = None parent = self parentsNextSibling = None while not parentsNextSibling: parentsNextSibling = parent.nextSibling parent = parent.parent if not parent: # This is the last element in the document. 
break if parentsNextSibling: newChildsLastElement.next = parentsNextSibling else: newChildsLastElement.next = None else: nextChild = self.contents[position] newChild.nextSibling = nextChild if newChild.nextSibling: newChild.nextSibling.previousSibling = newChild newChildsLastElement.next = nextChild if newChildsLastElement.next: newChildsLastElement.next.previous = newChildsLastElement self.contents.insert(position, newChild) def append(self, tag): """Appends the given tag to the contents of this tag.""" self.insert(len(self.contents), tag) def findNext(self, name=None, attrs={}, text=None, **kwargs): """Returns the first item that matches the given criteria and appears after this Tag in the document.""" return self._findOne(self.findAllNext, name, attrs, text, **kwargs) def findAllNext(self, name=None, attrs={}, text=None, limit=None, **kwargs): """Returns all items that match the given criteria and appear after this Tag in the document.""" return self._findAll(name, attrs, text, limit, self.nextGenerator, **kwargs) def findNextSibling(self, name=None, attrs={}, text=None, **kwargs): """Returns the closest sibling to this Tag that matches the given criteria and appears after this Tag in the document.""" return self._findOne(self.findNextSiblings, name, attrs, text, **kwargs) def findNextSiblings(self, name=None, attrs={}, text=None, limit=None, **kwargs): """Returns the siblings of this Tag that match the given criteria and appear after this Tag in the document.""" return self._findAll(name, attrs, text, limit, self.nextSiblingGenerator, **kwargs) fetchNextSiblings = findNextSiblings # Compatibility with pre-3.x def findPrevious(self, name=None, attrs={}, text=None, **kwargs): """Returns the first item that matches the given criteria and appears before this Tag in the document.""" return self._findOne(self.findAllPrevious, name, attrs, text, **kwargs) def findAllPrevious(self, name=None, attrs={}, text=None, limit=None, **kwargs): """Returns all items that match the given criteria and appear before this Tag in the document.""" return self._findAll(name, attrs, text, limit, self.previousGenerator, **kwargs) fetchPrevious = findAllPrevious # Compatibility with pre-3.x def findPreviousSibling(self, name=None, attrs={}, text=None, **kwargs): """Returns the closest sibling to this Tag that matches the given criteria and appears before this Tag in the document.""" return self._findOne(self.findPreviousSiblings, name, attrs, text, **kwargs) def findPreviousSiblings(self, name=None, attrs={}, text=None, limit=None, **kwargs): """Returns the siblings of this Tag that match the given criteria and appear before this Tag in the document.""" return self._findAll(name, attrs, text, limit, self.previousSiblingGenerator, **kwargs) fetchPreviousSiblings = findPreviousSiblings # Compatibility with pre-3.x def findParent(self, name=None, attrs={}, **kwargs): """Returns the closest parent of this Tag that matches the given criteria.""" # NOTE: We can't use _findOne because findParents takes a different # set of arguments. r = None l = self.findParents(name, attrs, 1) if l: r = l[0] return r def findParents(self, name=None, attrs={}, limit=None, **kwargs): """Returns the parents of this Tag that match the given criteria.""" return self._findAll(name, attrs, None, limit, self.parentGenerator, **kwargs) fetchParents = findParents # Compatibility with pre-3.x #These methods do the real heavy lifting. 
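#For example, findAllNext(name, attrs, text, limit) above is simply
#_findAll(name, attrs, text, limit, self.nextGenerator): the chosen
#generator walks one axis of the tree (next/previous/sibling/parent)
#and the SoupStrainer built from the arguments decides which of the
#yielded elements count as matches.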
def _findOne(self, method, name, attrs, text, **kwargs): r = None l = method(name, attrs, text, 1, **kwargs) if l: r = l[0] return r def _findAll(self, name, attrs, text, limit, generator, **kwargs): "Iterates over a generator looking for things that match." if isinstance(name, SoupStrainer): strainer = name else: # Build a SoupStrainer strainer = SoupStrainer(name, attrs, text, **kwargs) results = ResultSet(strainer) g = generator() while True: try: i = g.next() except StopIteration: break if i: found = strainer.search(i) if found: results.append(found) if limit and len(results) >= limit: break return results #These Generators can be used to navigate starting from both #NavigableStrings and Tags. def nextGenerator(self): i = self while i: i = i.next yield i def nextSiblingGenerator(self): i = self while i: i = i.nextSibling yield i def previousGenerator(self): i = self while i: i = i.previous yield i def previousSiblingGenerator(self): i = self while i: i = i.previousSibling yield i def parentGenerator(self): i = self while i: i = i.parent yield i # Utility methods def substituteEncoding(self, str, encoding=None): encoding = encoding or "utf-8" return str.replace("%SOUP-ENCODING%", encoding) def toEncoding(self, s, encoding=None): """Encodes an object to a string in some encoding, or to Unicode. .""" if isinstance(s, unicode): if encoding: s = s.encode(encoding) elif isinstance(s, str): if encoding: s = s.encode(encoding) else: s = unicode(s) else: if encoding: s = self.toEncoding(str(s), encoding) else: s = unicode(s) return s class NavigableString(unicode, PageElement): def __new__(cls, value): """Create a new NavigableString. When unpickling a NavigableString, this method is called with the string in DEFAULT_OUTPUT_ENCODING. That encoding needs to be passed in to the superclass's __new__ or the superclass won't know how to handle non-ASCII characters. """ if isinstance(value, unicode): return unicode.__new__(cls, value) return unicode.__new__(cls, value, DEFAULT_OUTPUT_ENCODING) def __getnewargs__(self): return (NavigableString.__str__(self),) def __getattr__(self, attr): """text.string gives you text. This is for backwards compatibility for Navigable*String, but for CData* it lets you get the string without the CData wrapper.""" if attr == 'string': return self else: raise AttributeError, "'%s' object has no attribute '%s'" % (self.__class__.__name__, attr) def __unicode__(self): return str(self).decode(DEFAULT_OUTPUT_ENCODING) def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING): if encoding: return self.encode(encoding) else: return self class CData(NavigableString): def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING): return "<![CDATA[%s]]>" % NavigableString.__str__(self, encoding) class ProcessingInstruction(NavigableString): def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING): output = self if "%SOUP-ENCODING%" in output: output = self.substituteEncoding(output, encoding) return "<?%s?>" % self.toEncoding(output, encoding) class Comment(NavigableString): def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING): return "<!--%s-->" % NavigableString.__str__(self, encoding) class Declaration(NavigableString): def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING): return "<!%s>" % NavigableString.__str__(self, encoding) class Tag(PageElement): """Represents a found HTML tag with its attributes and contents.""" def _invert(h): "Cheap function to invert a hash." 
i = {} for k,v in h.items(): i[v] = k return i XML_ENTITIES_TO_SPECIAL_CHARS = { "apos" : "'", "quot" : '"', "amp" : "&", "lt" : "<", "gt" : ">" } XML_SPECIAL_CHARS_TO_ENTITIES = _invert(XML_ENTITIES_TO_SPECIAL_CHARS) def _convertEntities(self, match): """Used in a call to re.sub to replace HTML, XML, and numeric entities with the appropriate Unicode characters. If HTML entities are being converted, any unrecognized entities are escaped.""" x = match.group(1) if self.convertHTMLEntities and x in name2codepoint: return unichr(name2codepoint[x]) elif x in self.XML_ENTITIES_TO_SPECIAL_CHARS: if self.convertXMLEntities: return self.XML_ENTITIES_TO_SPECIAL_CHARS[x] else: return u'&amp;%s;' % x elif len(x) > 0 and x[0] == '#': # Handle numeric entities if len(x) > 1 and x[1] == 'x': return unichr(int(x[2:], 16)) else: return unichr(int(x[1:])) elif self.escapeUnrecognizedEntities: return u'&amp;%s;' % x else: return u'&%s;' % x def __init__(self, parser, name, attrs=None, parent=None, previous=None): "Basic constructor." # We don't actually store the parser object: that lets extracted # chunks be garbage-collected self.parserClass = parser.__class__ self.isSelfClosing = parser.isSelfClosingTag(name) self.name = name if attrs == None: attrs = [] self.attrs = attrs self.contents = [] self.setup(parent, previous) self.hidden = False self.containsSubstitutions = False self.convertHTMLEntities = parser.convertHTMLEntities self.convertXMLEntities = parser.convertXMLEntities self.escapeUnrecognizedEntities = parser.escapeUnrecognizedEntities # Convert any HTML, XML, or numeric entities in the attribute values. convert = lambda(k, val): (k, re.sub("&(#\d+|#x[0-9a-fA-F]+|\w+);", self._convertEntities, val)) self.attrs = map(convert, self.attrs) def get(self, key, default=None): """Returns the value of the 'key' attribute for the tag, or the value given for 'default' if it doesn't have that attribute.""" return self._getAttrMap().get(key, default) def has_key(self, key): return self._getAttrMap().has_key(key) def __getitem__(self, key): """tag[key] returns the value of the 'key' attribute for the tag, and throws an exception if it's not there.""" return self._getAttrMap()[key] def __iter__(self): "Iterating over a tag iterates over its contents." return iter(self.contents) def __len__(self): "The length of a tag is the length of its list of contents." return len(self.contents) def __contains__(self, x): return x in self.contents def __nonzero__(self): "A tag is non-None even if it has no contents." return True def __setitem__(self, key, value): """Setting tag[key] sets the value of the 'key' attribute for the tag.""" self._getAttrMap() self.attrMap[key] = value found = False for i in range(0, len(self.attrs)): if self.attrs[i][0] == key: self.attrs[i] = (key, value) found = True if not found: self.attrs.append((key, value)) self._getAttrMap()[key] = value def __delitem__(self, key): "Deleting tag[key] deletes all 'key' attributes for the tag." for item in self.attrs: if item[0] == key: self.attrs.remove(item) #We don't break because bad HTML can define the same #attribute multiple times. self._getAttrMap() if self.attrMap.has_key(key): del self.attrMap[key] def __call__(self, *args, **kwargs): """Calling a tag like a function is the same as calling its findAll() method. Eg. 
tag('a') returns a list of all the A tags found within this tag.""" return apply(self.findAll, args, kwargs) def __getattr__(self, tag): #print "Getattr %s.%s" % (self.__class__, tag) if len(tag) > 3 and tag.rfind('Tag') == len(tag)-3: return self.find(tag[:-3]) elif tag.find('__') != 0: return self.find(tag) raise AttributeError, "'%s' object has no attribute '%s'" % (self.__class__, tag) def __eq__(self, other): """Returns true iff this tag has the same name, the same attributes, and the same contents (recursively) as the given tag. NOTE: right now this will return false if two tags have the same attributes in a different order. Should this be fixed?""" if not hasattr(other, 'name') or not hasattr(other, 'attrs') or not hasattr(other, 'contents') or self.name != other.name or self.attrs != other.attrs or len(self) != len(other): return False for i in range(0, len(self.contents)): if self.contents[i] != other.contents[i]: return False return True def __ne__(self, other): """Returns true iff this tag is not identical to the other tag, as defined in __eq__.""" return not self == other def __repr__(self, encoding=DEFAULT_OUTPUT_ENCODING): """Renders this tag as a string.""" return self.__str__(encoding) def __unicode__(self): return self.__str__(None) BARE_AMPERSAND_OR_BRACKET = re.compile("([<>]|" + "&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)" + ")") def _sub_entity(self, x): """Used with a regular expression to substitute the appropriate XML entity for an XML special character.""" return "&" + self.XML_SPECIAL_CHARS_TO_ENTITIES[x.group(0)[0]] + ";" def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING, prettyPrint=False, indentLevel=0): """Returns a string or Unicode representation of this tag and its contents. To get Unicode, pass None for encoding. NOTE: since Python's HTML parser consumes whitespace, this method is not certain to reproduce the whitespace present in the original string.""" encodedName = self.toEncoding(self.name, encoding) attrs = [] if self.attrs: for key, val in self.attrs: fmt = '%s="%s"' if isString(val): if self.containsSubstitutions and '%SOUP-ENCODING%' in val: val = self.substituteEncoding(val, encoding) # The attribute value either: # # * Contains no embedded double quotes or single quotes. # No problem: we enclose it in double quotes. # * Contains embedded single quotes. No problem: # double quotes work here too. # * Contains embedded double quotes. No problem: # we enclose it in single quotes. # * Embeds both single _and_ double quotes. This # can't happen naturally, but it can happen if # you modify an attribute value after parsing # the document. Now we have a bit of a # problem. We solve it by enclosing the # attribute in single quotes, and escaping any # embedded single quotes to XML entities. if '"' in val: fmt = "%s='%s'" if "'" in val: # TODO: replace with apos when # appropriate. val = val.replace("'", "&squot;") # Now we're okay w/r/t quotes. But the attribute # value might also contain angle brackets, or # ampersands that aren't part of entities. We need # to escape those to XML entities too. 
val = self.BARE_AMPERSAND_OR_BRACKET.sub(self._sub_entity, val) attrs.append(fmt % (self.toEncoding(key, encoding), self.toEncoding(val, encoding))) close = '' closeTag = '' if self.isSelfClosing: close = ' /' else: closeTag = '</%s>' % encodedName indentTag, indentContents = 0, 0 if prettyPrint: indentTag = indentLevel space = (' ' * (indentTag-1)) indentContents = indentTag + 1 contents = self.renderContents(encoding, prettyPrint, indentContents) if self.hidden: s = contents else: s = [] attributeString = '' if attrs: attributeString = ' ' + ' '.join(attrs) if prettyPrint: s.append(space) s.append('<%s%s%s>' % (encodedName, attributeString, close)) if prettyPrint: s.append("\n") s.append(contents) if prettyPrint and contents and contents[-1] != "\n": s.append("\n") if prettyPrint and closeTag: s.append(space) s.append(closeTag) if prettyPrint and closeTag and self.nextSibling: s.append("\n") s = ''.join(s) return s def decompose(self): """Recursively destroys the contents of this tree.""" contents = [i for i in self.contents] for i in contents: if isinstance(i, Tag): i.decompose() else: i.extract() self.extract() def prettify(self, encoding=DEFAULT_OUTPUT_ENCODING): return self.__str__(encoding, True) def renderContents(self, encoding=DEFAULT_OUTPUT_ENCODING, prettyPrint=False, indentLevel=0): """Renders the contents of this tag as a string in the given encoding. If encoding is None, returns a Unicode string..""" s=[] for c in self: text = None if isinstance(c, NavigableString): text = c.__str__(encoding) elif isinstance(c, Tag): s.append(c.__str__(encoding, prettyPrint, indentLevel)) if text and prettyPrint: text = text.strip() if text: if prettyPrint: s.append(" " * (indentLevel-1)) s.append(text) if prettyPrint: s.append("\n") return ''.join(s) #Soup methods def find(self, name=None, attrs={}, recursive=True, text=None, **kwargs): """Return only the first child of this Tag matching the given criteria.""" r = None l = self.findAll(name, attrs, recursive, text, 1, **kwargs) if l: r = l[0] return r findChild = find def findAll(self, name=None, attrs={}, recursive=True, text=None, limit=None, **kwargs): """Extracts a list of Tag objects that match the given criteria. You can specify the name of the Tag and any attributes you want the Tag to have. The value of a key-value pair in the 'attrs' map can be a string, a list of strings, a regular expression object, or a callable that takes a string and returns whether or not the string matches for some custom definition of 'matches'. 
The same is true of the tag name.""" generator = self.recursiveChildGenerator if not recursive: generator = self.childGenerator return self._findAll(name, attrs, text, limit, generator, **kwargs) findChildren = findAll # Pre-3.x compatibility methods first = find fetch = findAll def fetchText(self, text=None, recursive=True, limit=None): return self.findAll(text=text, recursive=recursive, limit=limit) def firstText(self, text=None, recursive=True): return self.find(text=text, recursive=recursive) #Private methods def _getAttrMap(self): """Initializes a map representation of this tag's attributes, if not already initialized.""" if not getattr(self, 'attrMap'): self.attrMap = {} for (key, value) in self.attrs: self.attrMap[key] = value return self.attrMap #Generator methods def childGenerator(self): for i in range(0, len(self.contents)): yield self.contents[i] raise StopIteration def recursiveChildGenerator(self): stack = [(self, 0)] while stack: tag, start = stack.pop() if isinstance(tag, Tag): for i in range(start, len(tag.contents)): a = tag.contents[i] yield a if isinstance(a, Tag) and tag.contents: if i < len(tag.contents) - 1: stack.append((tag, i+1)) stack.append((a, 0)) break raise StopIteration # Next, a couple classes to represent queries and their results. class SoupStrainer: """Encapsulates a number of ways of matching a markup element (tag or text).""" def __init__(self, name=None, attrs={}, text=None, **kwargs): self.name = name if isString(attrs): kwargs['class'] = attrs attrs = None if kwargs: if attrs: attrs = attrs.copy() attrs.update(kwargs) else: attrs = kwargs self.attrs = attrs self.text = text def __str__(self): if self.text: return self.text else: return "%s|%s" % (self.name, self.attrs) def searchTag(self, markupName=None, markupAttrs={}): found = None markup = None if isinstance(markupName, Tag): markup = markupName markupAttrs = markup callFunctionWithTagData = callable(self.name) \ and not isinstance(markupName, Tag) if (not self.name) \ or callFunctionWithTagData \ or (markup and self._matches(markup, self.name)) \ or (not markup and self._matches(markupName, self.name)): if callFunctionWithTagData: match = self.name(markupName, markupAttrs) else: match = True markupAttrMap = None for attr, matchAgainst in self.attrs.items(): if not markupAttrMap: if hasattr(markupAttrs, 'get'): markupAttrMap = markupAttrs else: markupAttrMap = {} for k,v in markupAttrs: markupAttrMap[k] = v attrValue = markupAttrMap.get(attr) if not self._matches(attrValue, matchAgainst): match = False break if match: if markup: found = markup else: found = markupName return found def search(self, markup): #print 'looking for %s in %s' % (self, markup) found = None # If given a list of items, scan it for a text element that # matches. if isList(markup) and not isinstance(markup, Tag): for element in markup: if isinstance(element, NavigableString) \ and self.search(element): found = element break # If it's a Tag, make sure its name or attributes match. # Don't bother with Tags if we're searching for text. elif isinstance(markup, Tag): if not self.text: found = self.searchTag(markup) # If it's text, make sure the text matches. 
elif isinstance(markup, NavigableString) or \ isString(markup): if self._matches(markup, self.text): found = markup else: raise Exception, "I don't know how to match against a %s" \ % markup.__class__ return found def _matches(self, markup, matchAgainst): #print "Matching %s against %s" % (markup, matchAgainst) result = False if matchAgainst == True and type(matchAgainst) == types.BooleanType: result = markup != None elif callable(matchAgainst): result = matchAgainst(markup) else: #Custom match methods take the tag as an argument, but all #other ways of matching match the tag name as a string. if isinstance(markup, Tag): markup = markup.name if markup and not isString(markup): markup = unicode(markup) #Now we know that chunk is either a string, or None. if hasattr(matchAgainst, 'match'): # It's a regexp object. result = markup and matchAgainst.search(markup) elif isList(matchAgainst): result = markup in matchAgainst elif hasattr(matchAgainst, 'items'): result = markup.has_key(matchAgainst) elif matchAgainst and isString(markup): if isinstance(markup, unicode): matchAgainst = unicode(matchAgainst) else: matchAgainst = str(matchAgainst) if not result: result = matchAgainst == markup return result class ResultSet(list): """A ResultSet is just a list that keeps track of the SoupStrainer that created it.""" def __init__(self, source): list.__init__([]) self.source = source # Now, some helper functions. def isList(l): """Convenience method that works with all 2.x versions of Python to determine whether or not something is listlike.""" return hasattr(l, '__iter__') \ or (type(l) in (types.ListType, types.TupleType)) def isString(s): """Convenience method that works with all 2.x versions of Python to determine whether or not something is stringlike.""" try: return isinstance(s, unicode) or isinstance(s, basestring) except NameError: return isinstance(s, str) def buildTagMap(default, *args): """Turns a list of maps, lists, or scalars into a single map. Used to build the SELF_CLOSING_TAGS, NESTABLE_TAGS, and NESTING_RESET_TAGS maps out of lists and partial maps.""" built = {} for portion in args: if hasattr(portion, 'items'): #It's a map. Merge it. for k,v in portion.items(): built[k] = v elif isList(portion): #It's a list. Map each item to the default. for k in portion: built[k] = default else: #It's a scalar. Map it to the default. built[portion] = default return built # Now, the parser classes. class BeautifulStoneSoup(Tag, SGMLParser): """This class contains the basic parser and search code. It defines a parser that knows nothing about tag behavior except for the following: You can't close a tag without closing all the tags it encloses. That is, "<foo><bar></foo>" actually means "<foo><bar></bar></foo>". [Another possible explanation is "<foo><bar /></foo>", but since this class defines no SELF_CLOSING_TAGS, it will never use that explanation.] This class is useful for parsing XML or made-up markup languages, or when BeautifulSoup makes an assumption counter to what you were expecting.""" SELF_CLOSING_TAGS = {} NESTABLE_TAGS = {} RESET_NESTING_TAGS = {} QUOTE_TAGS = {} PRESERVE_WHITESPACE_TAGS = [] MARKUP_MASSAGE = [(re.compile('(<[^<>]*)/>'), lambda x: x.group(1) + ' />'), (re.compile('<!\s+([^<>]*)>'), lambda x: '<!' + x.group(1) + '>') ] ROOT_TAG_NAME = u'[document]' HTML_ENTITIES = "html" XML_ENTITIES = "xml" XHTML_ENTITIES = "xhtml" # TODO: This only exists for backwards-compatibility ALL_ENTITIES = XHTML_ENTITIES # Used when determining whether a text node is all whitespace and # can be replaced with a single space. 
A text node that contains # fancy Unicode spaces (usually non-breaking) should be left # alone. STRIP_ASCII_SPACES = { 9: None, 10: None, 12: None, 13: None, 32: None, } def __init__(self, markup="", parseOnlyThese=None, fromEncoding=None, markupMassage=True, smartQuotesTo=XML_ENTITIES, convertEntities=None, selfClosingTags=None, isHTML=False): """The Soup object is initialized as the 'root tag', and the provided markup (which can be a string or a file-like object) is fed into the underlying parser. sgmllib will process most bad HTML, and the BeautifulSoup class has some tricks for dealing with some HTML that kills sgmllib, but Beautiful Soup can nonetheless choke or lose data if your data uses self-closing tags or declarations incorrectly. By default, Beautiful Soup uses regexes to sanitize input, avoiding the vast majority of these problems. If the problems don't apply to you, pass in False for markupMassage, and you'll get better performance. The default parser massage techniques fix the two most common instances of invalid HTML that choke sgmllib:
<br/> (No space between name of closing tag and tag close) <! --comment--> (Extraneous whitespace in declaration) You can pass in a custom list of (RE object, replace method) tuples to get Beautiful Soup to scrub your input the way you want.""" self.parseOnlyThese = parseOnlyThese self.fromEncoding = fromEncoding self.smartQuotesTo = smartQuotesTo self.convertEntities = convertEntities # Set the rules for how we'll deal with the entities we # encounter if self.convertEntities: # It doesn't make sense to convert encoded characters to # entities even while you're converting entities to Unicode. # Just convert it all to Unicode. self.smartQuotesTo = None if convertEntities == self.HTML_ENTITIES: self.convertXMLEntities = False self.convertHTMLEntities = True self.escapeUnrecognizedEntities = True elif convertEntities == self.XHTML_ENTITIES: self.convertXMLEntities = True self.convertHTMLEntities = True self.escapeUnrecognizedEntities = False elif convertEntities == self.XML_ENTITIES: self.convertXMLEntities = True self.convertHTMLEntities = False self.escapeUnrecognizedEntities = False else: self.convertXMLEntities = False self.convertHTMLEntities = False self.escapeUnrecognizedEntities = False self.instanceSelfClosingTags = buildTagMap(None, selfClosingTags) SGMLParser.__init__(self) if hasattr(markup, 'read'): # It's a file-type object. markup = markup.read() self.markup = markup self.markupMassage = markupMassage try: self._feed(isHTML=isHTML) except StopParsing: pass self.markup = None # The markup can now be GCed def convert_charref(self, name): """This method fixes a bug in Python's SGMLParser.""" try: n = int(name) except ValueError: return if not 0 <= n <= 127 : # ASCII ends at 127, not 255 return return self.convert_codepoint(n) def _feed(self, inDocumentEncoding=None, isHTML=False): # Convert the document to Unicode. markup = self.markup if isinstance(markup, unicode): if not hasattr(self, 'originalEncoding'): self.originalEncoding = None else: dammit = UnicodeDammit\ (markup, [self.fromEncoding, inDocumentEncoding], smartQuotesTo=self.smartQuotesTo, isHTML=isHTML) markup = dammit.unicode self.originalEncoding = dammit.originalEncoding self.declaredHTMLEncoding = dammit.declaredHTMLEncoding if markup: if self.markupMassage: if not isList(self.markupMassage): self.markupMassage = self.MARKUP_MASSAGE for fix, m in self.markupMassage: markup = fix.sub(m, markup) # TODO: We get rid of markupMassage so that the # soup object can be deepcopied later on. Some # Python installations can't copy regexes. If anyone # was relying on the existence of markupMassage, this # might cause problems. del(self.markupMassage) self.reset() SGMLParser.feed(self, markup) # Close out any unfinished strings and close all the open tags. 
self.endData() while self.currentTag.name != self.ROOT_TAG_NAME: self.popTag() def __getattr__(self, methodName): """This method routes method call requests to either the SGMLParser superclass or the Tag superclass, depending on the method name.""" #print "__getattr__ called on %s.%s" % (self.__class__, methodName) if methodName.find('start_') == 0 or methodName.find('end_') == 0 \ or methodName.find('do_') == 0: return SGMLParser.__getattr__(self, methodName) elif methodName.find('__') != 0: return Tag.__getattr__(self, methodName) else: raise AttributeError def isSelfClosingTag(self, name): """Returns true iff the given string is the name of a self-closing tag according to this parser.""" return self.SELF_CLOSING_TAGS.has_key(name) \ or self.instanceSelfClosingTags.has_key(name) def reset(self): Tag.__init__(self, self, self.ROOT_TAG_NAME) self.hidden = 1 SGMLParser.reset(self) self.currentData = [] self.currentTag = None self.tagStack = [] self.quoteStack = [] self.pushTag(self) def popTag(self): tag = self.tagStack.pop() # Tags with just one string-owning child get the child as a # 'string' property, so that soup.tag.string is shorthand for # soup.tag.contents[0] if len(self.currentTag.contents) == 1 and \ isinstance(self.currentTag.contents[0], NavigableString): self.currentTag.string = self.currentTag.contents[0] #print "Pop", tag.name if self.tagStack: self.currentTag = self.tagStack[-1] return self.currentTag def pushTag(self, tag): #print "Push", tag.name if self.currentTag: self.currentTag.contents.append(tag) self.tagStack.append(tag) self.currentTag = self.tagStack[-1] def endData(self, containerClass=NavigableString): if self.currentData: currentData = u''.join(self.currentData) if (currentData.translate(self.STRIP_ASCII_SPACES) == '' and not set([tag.name for tag in self.tagStack]).intersection( self.PRESERVE_WHITESPACE_TAGS)): if '\n' in currentData: currentData = '\n' else: currentData = ' ' self.currentData = [] if self.parseOnlyThese and len(self.tagStack) <= 1 and \ (not self.parseOnlyThese.text or \ not self.parseOnlyThese.search(currentData)): return o = containerClass(currentData) o.setup(self.currentTag, self.previous) if self.previous: self.previous.next = o self.previous = o self.currentTag.contents.append(o) def _popToTag(self, name, inclusivePop=True): """Pops the tag stack up to and including the most recent instance of the given tag. If inclusivePop is false, pops the tag stack up to but *not* including the most recent instqance of the given tag.""" #print "Popping to %s" % name if name == self.ROOT_TAG_NAME: return numPops = 0 mostRecentTag = None for i in range(len(self.tagStack)-1, 0, -1): if name == self.tagStack[i].name: numPops = len(self.tagStack)-i break if not inclusivePop: numPops = numPops - 1 for i in range(0, numPops): mostRecentTag = self.popTag() return mostRecentTag def _smartPop(self, name): """We need to pop up to the previous tag of this type, unless one of this tag's nesting reset triggers comes between this tag and the previous tag of this type, OR unless this tag is a generic nesting trigger and another generic nesting trigger comes between this tag and the previous tag of this type. Examples:

 <p>Foo<b>Bar *<p> should pop to 'p', not 'b'.
 <p>Foo<table>Bar *<p> should pop to 'table', not 'p'.
 <p>Foo<table><tr>Bar *<p> should pop to 'tr', not 'p'.
 <li><ul><li> *<li> should pop to 'ul', not the first 'li'.
 <tr><table><tr> *<tr> should pop to 'table', not the first 'tr'
 <td><tr><td> *<td> should pop to 'tr', not the first 'td' """ nestingResetTriggers = self.NESTABLE_TAGS.get(name) isNestable = nestingResetTriggers != None isResetNesting = self.RESET_NESTING_TAGS.has_key(name) popTo = None inclusive = True for i in range(len(self.tagStack)-1, 0, -1): p = self.tagStack[i] if (not p or p.name == name) and not isNestable: #Non-nestable tags get popped to the top or to their #last occurance. popTo = name break if (nestingResetTriggers != None and p.name in nestingResetTriggers) \ or (nestingResetTriggers == None and isResetNesting and self.RESET_NESTING_TAGS.has_key(p.name)): #If we encounter one of the nesting reset triggers #peculiar to this tag, or we encounter another tag #that causes nesting to reset, pop up to but not #including that tag. popTo = p.name inclusive = False break p = p.parent if popTo: self._popToTag(popTo, inclusive) def unknown_starttag(self, name, attrs, selfClosing=0): #print "Start tag %s: %s" % (name, attrs) if self.quoteStack: #This is not a real tag. #print "<%s> is not real!" % name attrs = ''.join(map(lambda(x, y): ' %s="%s"' % (x, y), attrs)) self.handle_data('<%s%s>' % (name, attrs)) return self.endData() if not self.isSelfClosingTag(name) and not selfClosing: self._smartPop(name) if self.parseOnlyThese and len(self.tagStack) <= 1 \ and (self.parseOnlyThese.text or not self.parseOnlyThese.searchTag(name, attrs)): return tag = Tag(self, name, attrs, self.currentTag, self.previous) if self.previous: self.previous.next = tag self.previous = tag self.pushTag(tag) if selfClosing or self.isSelfClosingTag(name): self.popTag() if name in self.QUOTE_TAGS: #print "Beginning quote (%s)" % name self.quoteStack.append(name) self.literal = 1 return tag def unknown_endtag(self, name): #print "End tag %s" % name if self.quoteStack and self.quoteStack[-1] != name: #This is not a real end tag. #print "</%s> is not real!" % name self.handle_data('</%s>' % name) return self.endData() self._popToTag(name) if self.quoteStack and self.quoteStack[-1] == name: self.quoteStack.pop() self.literal = (len(self.quoteStack) > 0) def handle_data(self, data): self.currentData.append(data) def _toStringSubclass(self, text, subclass): """Adds a certain piece of text to the tree as a NavigableString subclass.""" self.endData() self.handle_data(text) self.endData(subclass) def handle_pi(self, text): """Handle a processing instruction as a ProcessingInstruction object, possibly one with a %SOUP-ENCODING% slot into which an encoding will be plugged later.""" if text[:3] == "xml": text = u"xml version='1.0' encoding='%SOUP-ENCODING%'" self._toStringSubclass(text, ProcessingInstruction) def handle_comment(self, text): "Handle comments as Comment objects." self._toStringSubclass(text, Comment) def handle_charref(self, ref): "Handle character references as data." if self.convertEntities: data = unichr(int(ref)) else: data = '&#%s;' % ref self.handle_data(data) def handle_entityref(self, ref): """Handle entity references as data, possibly converting known HTML and/or XML entity references to the corresponding Unicode characters.""" data = None if self.convertHTMLEntities: try: data = unichr(name2codepoint[ref]) except KeyError: pass if not data and self.convertXMLEntities: data = self.XML_ENTITIES_TO_SPECIAL_CHARS.get(ref) if not data and self.convertHTMLEntities and \ not self.XML_ENTITIES_TO_SPECIAL_CHARS.get(ref): # TODO: We've got a problem here. We're told this is # an entity reference, but it's not an XML entity # reference or an HTML entity reference. 
Nonetheless, # the logical thing to do is to pass it through as an # unrecognized entity reference. # # Except: when the input is "&carol;" this function # will be called with input "carol". When the input is # "AT&T", this function will be called with input # "T". We have no way of knowing whether a semicolon # was present originally, so we don't know whether # this is an unknown entity or just a misplaced # ampersand. # # The more common case is a misplaced ampersand, so I # escape the ampersand and omit the trailing semicolon. data = "&%s" % ref if not data: # This case is different from the one above, because we # haven't already gone through a supposedly comprehensive # mapping of entities to Unicode characters. We might not # have gone through any mapping at all. So the chances are # very high that this is a real entity, and not a # misplaced ampersand. data = "&%s;" % ref self.handle_data(data) def handle_decl(self, data): "Handle DOCTYPEs and the like as Declaration objects." self._toStringSubclass(data, Declaration) def parse_declaration(self, i): """Treat a bogus SGML declaration as raw data. Treat a CDATA declaration as a CData object.""" j = None if self.rawdata[i:i+9] == '', i) if k == -1: k = len(self.rawdata) data = self.rawdata[i+9:k] j = k+3 self._toStringSubclass(data, CData) else: try: j = SGMLParser.parse_declaration(self, i) except SGMLParseError: toHandle = self.rawdata[i:] self.handle_data(toHandle) j = i + len(toHandle) return j class BeautifulSoup(BeautifulStoneSoup): """This parser knows the following facts about HTML: * Some tags have no closing tag and should be interpreted as being closed as soon as they are encountered. * The text inside some tags (ie. 'script') may contain tags which are not really part of the document and which should be parsed as text, not tags. If you want to parse the text as tags, you can always fetch it and parse it explicitly. * Tag nesting rules: Most tags can't be nested at all. For instance, the occurance of a

      <p> tag should implicitly close the previous <p> tag.

       <p>Para1<p>Para2
        should be transformed into:
       <p>Para1</p><p>Para2

      Some tags can be nested arbitrarily. For instance, the occurance
      of a <blockquote> tag should _not_ implicitly close the previous
      <blockquote> tag.

       Alice said: <blockquote>Bob said: <blockquote>Blah
        should NOT be transformed into:
       Alice said: <blockquote>Bob said: </blockquote><blockquote>Blah

      Some tags can be nested, but the nesting is reset by the
      interposition of other tags. For instance, a <tr> tag should
      implicitly close the previous <tr> tag within the same <table>,
      but not close a <tr> tag in another table.

       <table><tr>Blah<tr>Blah
        should be transformed into:
       <table><tr>Blah</tr><tr>Blah
        but,
       <tr>Blah<table><tr>Blah
        should NOT be transformed into
       <tr>Blah<table><tr></tr><tr>
    Blah Differing assumptions about tag nesting rules are a major source of problems with the BeautifulSoup class. If BeautifulSoup is not treating as nestable a tag your page author treats as nestable, try ICantBelieveItsBeautifulSoup, MinimalSoup, or BeautifulStoneSoup before writing your own subclass.""" def __init__(self, *args, **kwargs): if not kwargs.has_key('smartQuotesTo'): kwargs['smartQuotesTo'] = self.HTML_ENTITIES kwargs['isHTML'] = True BeautifulStoneSoup.__init__(self, *args, **kwargs) SELF_CLOSING_TAGS = buildTagMap(None, ['br' , 'hr', 'input', 'img', 'meta', 'spacer', 'link', 'frame', 'base']) PRESERVE_WHITESPACE_TAGS = set(['pre', 'textarea']) QUOTE_TAGS = {'script' : None, 'textarea' : None} #According to the HTML standard, each of these inline tags can #contain another tag of the same type. Furthermore, it's common #to actually use these tags this way. NESTABLE_INLINE_TAGS = ['span', 'font', 'q', 'object', 'bdo', 'sub', 'sup', 'center'] #According to the HTML standard, these block tags can contain #another tag of the same type. Furthermore, it's common #to actually use these tags this way. NESTABLE_BLOCK_TAGS = ['blockquote', 'div', 'fieldset', 'ins', 'del'] #Lists can contain other lists, but there are restrictions. NESTABLE_LIST_TAGS = { 'ol' : [], 'ul' : [], 'li' : ['ul', 'ol'], 'dl' : [], 'dd' : ['dl'], 'dt' : ['dl'] } #Tables can contain other tables, but there are restrictions. NESTABLE_TABLE_TAGS = {'table' : [], 'tr' : ['table', 'tbody', 'tfoot', 'thead'], 'td' : ['tr'], 'th' : ['tr'], 'thead' : ['table'], 'tbody' : ['table'], 'tfoot' : ['table'], } NON_NESTABLE_BLOCK_TAGS = ['address', 'form', 'p', 'pre'] #If one of these tags is encountered, all tags up to the next tag of #this type are popped. RESET_NESTING_TAGS = buildTagMap(None, NESTABLE_BLOCK_TAGS, 'noscript', NON_NESTABLE_BLOCK_TAGS, NESTABLE_LIST_TAGS, NESTABLE_TABLE_TAGS) NESTABLE_TAGS = buildTagMap([], NESTABLE_INLINE_TAGS, NESTABLE_BLOCK_TAGS, NESTABLE_LIST_TAGS, NESTABLE_TABLE_TAGS) # Used to detect the charset in a META tag; see start_meta CHARSET_RE = re.compile("((^|;)\s*charset=)([^;]*)", re.M) def start_meta(self, attrs): """Beautiful Soup can detect a charset included in a META tag, try to convert the document to that charset, and re-parse the document from the beginning.""" httpEquiv = None contentType = None contentTypeIndex = None tagNeedsEncodingSubstitution = False for i in range(0, len(attrs)): key, value = attrs[i] key = key.lower() if key == 'http-equiv': httpEquiv = value elif key == 'content': contentType = value contentTypeIndex = i if httpEquiv and contentType: # It's an interesting meta tag. match = self.CHARSET_RE.search(contentType) if match: if (self.declaredHTMLEncoding is not None or self.originalEncoding == self.fromEncoding): # An HTML encoding was sniffed while converting # the document to Unicode, or an HTML encoding was # sniffed during a previous pass through the # document, or an encoding was specified # explicitly and it worked. Rewrite the meta tag. def rewrite(match): return match.group(1) + "%SOUP-ENCODING%" newAttr = self.CHARSET_RE.sub(rewrite, contentType) attrs[contentTypeIndex] = (attrs[contentTypeIndex][0], newAttr) tagNeedsEncodingSubstitution = True else: # This is our first pass through the document. # Go through it again with the encoding information. 
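# ---------------------------------------------------------------------
# (Editor's illustration -- not part of the original BeautifulSoup.py.)
# A minimal sketch of the nesting rules documented in the class
# docstring above, using only names defined in this file; the exact
# serialization may vary slightly between BeautifulSoup 3.0.x releases.
def _example_nesting_rules():
    # Non-nestable tags: the second <p> implicitly closes the first,
    # yielding <p>Para1</p><p>Para2</p>.
    print BeautifulSoup('<p>Para1<p>Para2')
    # Nestable tags: the second <blockquote> stays inside the first.
    print BeautifulSoup('Alice said: <blockquote>Bob said: <blockquote>Blah')
    # Nesting reset: the second <tr> closes the first within one <table>.
    print BeautifulSoup('<table><tr>Blah<tr>Blah</table>')
# ---------------------------------------------------------------------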
newCharset = match.group(3) if newCharset and newCharset != self.originalEncoding: self.declaredHTMLEncoding = newCharset self._feed(self.declaredHTMLEncoding) raise StopParsing pass tag = self.unknown_starttag("meta", attrs) if tag and tagNeedsEncodingSubstitution: tag.containsSubstitutions = True class StopParsing(Exception): pass class ICantBelieveItsBeautifulSoup(BeautifulSoup): """The BeautifulSoup class is oriented towards skipping over common HTML errors like unclosed tags. However, sometimes it makes errors of its own. For instance, consider this fragment: FooBar This is perfectly valid (if bizarre) HTML. However, the BeautifulSoup class will implicitly close the first b tag when it encounters the second 'b'. It will think the author wrote "FooBar", and didn't close the first 'b' tag, because there's no real-world reason to bold something that's already bold. When it encounters '' it will close two more 'b' tags, for a grand total of three tags closed instead of two. This can throw off the rest of your document structure. The same is true of a number of other tags, listed below. It's much more common for someone to forget to close a 'b' tag than to actually use nested 'b' tags, and the BeautifulSoup class handles the common case. This class handles the not-co-common case: where you can't believe someone wrote what they did, but it's valid HTML and BeautifulSoup screwed up by assuming it wouldn't be.""" I_CANT_BELIEVE_THEYRE_NESTABLE_INLINE_TAGS = \ ['em', 'big', 'i', 'small', 'tt', 'abbr', 'acronym', 'strong', 'cite', 'code', 'dfn', 'kbd', 'samp', 'strong', 'var', 'b', 'big'] I_CANT_BELIEVE_THEYRE_NESTABLE_BLOCK_TAGS = ['noscript'] NESTABLE_TAGS = buildTagMap([], BeautifulSoup.NESTABLE_TAGS, I_CANT_BELIEVE_THEYRE_NESTABLE_BLOCK_TAGS, I_CANT_BELIEVE_THEYRE_NESTABLE_INLINE_TAGS) class MinimalSoup(BeautifulSoup): """The MinimalSoup class is for parsing HTML that contains pathologically bad markup. It makes no assumptions about tag nesting, but it does know which tags are self-closing, that ") html.append("""
    """) html = "\n".join(html) header = self._html_widget.build_header(html) return header def _load_entry_block(self, entry_list, mark_read=False, force=False): #if not forcing, load what we can from cache entries = [] if not force: l = [row for row in entry_list if self._entry_store.has_key(row[0])] for row in l: entries.append(self._entry_store[row[0]][1]) entry_list.remove(row) #load the rest from db if len(entry_list) > 0: e_id_list = [r[0] for r in entry_list] db_entries = self._db.get_entry_block(e_id_list, self._ajax_url) media = self._db.get_entry_media_block(e_id_list) for item in db_entries: if media.has_key(item['entry_id']): item['media'] = media[item['entry_id']] else: item['media'] = [] item['new'] = not item['read'] if mark_read and not item.has_key('media'): item['read'] = True if self._state == S_SEARCH: item['feed_title'] = self._db.get_feed_title(item['feed_id']) self._entry_store[item['entry_id']] = (self._search_formatter.htmlify_item(item, self._convert_newlines),item) else: self._entry_store[item['entry_id']] = (self._entry_formatter.htmlify_item(item, self._convert_newlines),item) #only reformat if read status changes for item in entries: item['new'] = not item['read'] if mark_read and not item.has_key('media'): item['read'] = True if self._state == S_SEARCH: item['feed_title'] = self._db.get_feed_title(item['feed_id']) self._entry_store[item['entry_id']] = (self._search_formatter.htmlify_item(item, self._convert_newlines),item) else: self._entry_store[item['entry_id']] = (self._entry_formatter.htmlify_item(item, self._convert_newlines),item) def _load_entry(self, entry_id, force=False): if self._entry_store.has_key(entry_id) and not force: return self._entry_store[entry_id] item = self._db.get_entry(entry_id, self._ajax_url) media = self._db.get_entry_media(entry_id) if media: item['media']=media else: item['media'] = [] item['new'] = not item['read'] if self._state == S_SEARCH: item['feed_title'] = self._db.get_feed_title(item['feed_id']) new_format = self._search_formatter.htmlify_item(item, self._convert_newlines) if self._entry_store.has_key(entry_id): if new_format == self._entry_store[entry_id][0]: self._entry_store[entry_id] = (new_format, item) return self._entry_store[entry_id] self._entry_store[entry_id] = (new_format, item) else: new_format = self._entry_formatter.htmlify_item(item, self._convert_newlines) if self._entry_store.has_key(entry_id): if new_format == self._entry_store[entry_id][0]: #if the new formatting is the same as the old, don't do anything different self._entry_store[entry_id] = (new_format, item) return self._entry_store[entry_id] self._entry_store[entry_id] = (new_format, item) try: index = self._entrylist.index((entry_id,self._current_feed_id)) except: logging.warning("Told to update an entry we don't have -- can't update") return self._entry_store[entry_id] if index >= self._first_entry and index <= self._first_entry+ENTRIES_PER_PAGE: entry = self._entry_store[entry_id][1] if self._USING_AJAX: ret = [] ret.append(str(entry_id)+" ") ret.append(self._entry_store[entry_id][0]) ret = "".join(ret) self._update_server.push_update(ret) else: self._render_entries() gobject.timeout_add(2000, self._do_delayed_set_viewed, self._current_feed_id, self._first_entry, self._last_entry, True) return self._entry_store[entry_id] def _update_entry(self, entry_id, item, show_change): if self._state == S_SEARCH: self._entry_store[entry_id] = (self._search_formatter.htmlify_item(item, self._convert_newlines),item) else: self._entry_store[entry_id] = 
(self._entry_formatter.htmlify_item(item, self._convert_newlines),item) i=0 for e,f in self._entrylist: if e == entry_id: index = i i += 1 if not show_change: return if index >= self._first_entry and index <= self._first_entry+ENTRIES_PER_PAGE: if self._USING_AJAX: ret = [] ret.append(str(entry_id)+" ") ret.append(self._entry_store[entry_id][0]) ret = "".join(ret) self._update_server.push_update(ret) def _render(self, html): image_id = None if self._renderer == EntryFormatter.GTKHTML: image_id = self.get_display_id() self._html_widget.render(html, self._ajax_url, image_id) def _do_delayed_set_viewed(self, feed_id, first_entry, last_entry, show_change=False): if (feed_id, first_entry, last_entry) != \ (self._current_feed_id, self._first_entry, self._last_entry): return False keepers = [] if self._filter_feed is not None: assert self._state == S_SEARCH entrylist = [r for r in self._entrylist if r[1] == self._filter_feed] else: entrylist = self._entrylist self._load_entry_block(entrylist[self._first_entry:self._last_entry]) for entry_id, f in entrylist[self._first_entry:self._last_entry]: item = self._entry_store[entry_id][1] if not item['read'] and not item['keep'] and len(item['media']) == 0: keepers.append(item) for item in keepers: item['read'] = True item['new'] = False self._update_entry(item['entry_id'], item, show_change) if len(keepers) > 0: if self._state == S_SEARCH: return False # if feed_id == -1: # for item in keepers: # self.emit('entries-viewed', [(item['feed_id'], [item['entry_id']])]) # return False self.emit('entries-viewed', [(feed_id, [e['entry_id'] for e in keepers])]) return False def _hulahop_prop_changed(self, obj, pspec): if pspec.name == 'status': self._main_window.display_status_message(self._moz.get_property('status')) def _do_context_menu(self, entry_id): """pops up a context menu for the designated item""" # When we right click on an item, we also get an event for the whole # document, so ignore that one. 
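# (Editor's sketch -- not part of the original file. Right-clicking an
# entry fires an entry-level event followed by a document-level one
# (entry_id == 0), so the one-shot boolean used just below swallows
# exactly one duplicate; a self-contained version of the pattern:)
def _example_one_shot_filter():
    state = {'ignore_next': False}
    def on_click(entry_id):
        if entry_id == 0:
            if state['ignore_next']:
                state['ignore_next'] = False   # swallow the duplicate
                return None
        else:
            state['ignore_next'] = True        # an entry event precedes it
        return entry_id                        # handle the event normally
    return on_click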
if entry_id == 0: if self._ignore_next_event: self._ignore_next_event = False return else: self._ignore_next_event = True menu = gtk.Menu() if entry_id == 0 and self._state == S_SEARCH: return if entry_id > 0: try: entry = self._load_entry(entry_id)[1] except ptvDB.NoEntry: return item = gtk.MenuItem(_("_Open in Browser...")) item.connect('activate', lambda e: self._app.activate_link(entry['link'])) menu.append(item) #separator = gtk.SeparatorMenuItem() #menu.append(separator) entry['flag'] = self._db.get_entry_flag(entry_id) if entry['flag'] & ptvDB.F_MEDIA: if entry['flag'] & ptvDB.F_DOWNLOADED == 0: item = gtk.ImageMenuItem(_("_Download")) img = gtk.image_new_from_stock('gtk-go-down',gtk.ICON_SIZE_MENU) item.set_image(img) item.connect('activate', lambda e,i: self._app.download_entry(i), entry_id) menu.append(item) else: item = gtk.ImageMenuItem(_("_Re-Download")) img = gtk.image_new_from_stock('gtk-go-down',gtk.ICON_SIZE_MENU) item.set_image(img) item.connect('activate', lambda e,i: self._app.download_entry(i), entry_id) menu.append(item) item = gtk.ImageMenuItem('gtk-media-play') item.connect('activate', lambda e,i: self._app.play_entry(i), entry_id) menu.append(item) item = gtk.MenuItem(_("Delete")) item.connect('activate', lambda e,i: self._app.delete_entry_media(i), entry_id) menu.append(item) if entry['flag'] & ptvDB.F_UNVIEWED: item = gtk.MenuItem(_("Mark As _Viewed")) item.connect('activate', lambda e,i: self._app.mark_entry_as_viewed(i), entry_id) menu.append(item) else: item = gtk.MenuItem(_("Mark As _Unviewed")) item.connect('activate', lambda e,i: self._app.mark_entry_as_unviewed(i), entry_id) menu.append(item) keep = self._db.get_entry_keep(entry['entry_id']) if keep: item = gtk.MenuItem(_("_Don't Keep New")) item.connect('activate', lambda e,i: self._app.activate_link("unkeep:%i" % (i,)), entry_id) menu.append(item) else: item = gtk.MenuItem(_("_Keep New")) item.connect('activate', lambda e,i: self._app.activate_link("keep:%i" % (i,)), entry_id) menu.append(item) if self._state != S_SEARCH: separator = gtk.SeparatorMenuItem() menu.append(separator) if self._state != S_SEARCH: if self._hide_viewed: item = gtk.MenuItem(_("_Show All")) item.connect('activate', self._toggle_hide_viewed) menu.append(item) else: item = gtk.MenuItem(_("_Hide Viewed Entries")) item.connect('activate', self._toggle_hide_viewed) menu.append(item) menu.show_all() menu.popup(None,None,None, 3, 0) def _link_clicked(self, link): if link == "planet:up": self._do_planet_up() elif link == "planet:down": self._do_planet_down() elif link == "pane:back": self._do_pane_back() elif link.startswith("rightclick"): self._do_context_menu(int(link.split(':')[1])) else: self.emit('link-activated', link) def _do_pane_back(self, a=None): self._main_window.pane_to_feeds() def _do_planet_up(self, a=None): self._first_entry -= ENTRIES_PER_PAGE self._html_widget.dl_interrupt() self._render_entries(mark_read=True) def _do_planet_down(self, a=None): self._first_entry += ENTRIES_PER_PAGE self._html_widget.dl_interrupt() self._render_entries(mark_read=True) def set_hide_viewed(self, state): if state == self._hide_viewed: return self._toggle_hide_viewed() def _toggle_hide_viewed(self, e=None): if self._hide_viewed: self._hide_viewed = False else: self._hide_viewed = True self._main_window.set_hide_entries_menuitem(self._hide_viewed) self._first_entry = 0 self._render_entries() PenguinTV-4.2.0/penguintv/ptv_sync.py0000755000000000000000000001241010717214416014524 0ustar #!/usr/bin/python #runs standalone, or can be accessed as a 
class for use in penguintv import os,os.path,sys import string import glob import os.path import shutil import getopt import utils try: import penguintv import ptvDB except: sys.path.insert(0, find_penguintv_lib()) #this will bomb if necessry import penguintv import ptvDB class ptv_sync: def __init__(self,dest_dir, delete=False, move=False, audio=False, dryrun=False): self.dest_dir = dest_dir self.audio = audio self.delete = delete self.move = move self.dryrun = dryrun self.cancel = False def interrupt(self): self.cancel = True def sync_gen(self): """generator yields cur item number, total, and message. If total is -1, unmeasured progress""" db = ptvDB.ptvDB() feedlist = db.get_feedlist() locallist = [] for feed in feedlist: if self.cancel: break entrylist = db.get_entrylist(feed[0]) for entry in entrylist: if self.cancel: break medialist = db.get_entry_media(entry[0]) if medialist: for medium in medialist: yield (0,-1,_("Building file list..."), None) if medium['file']: if self.audio == True: if medium['file'].rsplit(".",1)[-1].upper() not in ("MP3","OGG","FLAC","WMA","M4A"): continue try: source_size = os.stat(medium['file'])[6] except: continue locallist.append([feed[1],medium['file'],source_size, medium['media_id']]) if not self.move: db._c.close() #ug db._db.close() #yuck if self.delete: for root,dirs,files in os.walk(self.dest_dir): if self.cancel: break i=-1 for f in files: if self.cancel: break i+=1 if f not in [os.path.split(l[1])[1] for l in locallist]: d = {'filename': os.path.join(str(root),str(f))} yield (0,-1,_("Removing %(filename)s") % d, None) if self.dryrun==False: os.remove(os.path.join(str(root),str(f))) i=-1 for f in locallist: i+=1 if self.cancel: break filename = os.path.split(f[1])[1] sub_dir = os.path.join(self.dest_dir,f[0]) sub_dir = sub_dir.replace(":","_") if self.dryrun==False: try: os.mkdir(sub_dir) except OSError,e: if e.errno == 17: pass else: print "couldn't create dir:"+str(sub_dir) continue try: dest_size = os.stat(os.path.join(sub_dir,filename))[6] if f[2] == dest_size: yield (i, len(locallist), _("%(filename)s already exists") % d, f[3]) continue except: pass d = {'filename': filename} yield (i, len(locallist), _("Copying %(filename)s") % d, f[3]) if self.dryrun==False: shutil.copyfile(f[1], os.path.join(sub_dir,filename)) if self.move: db._c.close() #ug db._db.close() #yuck if self.delete and not self.cancel: for root,dirs,files in os.walk(self.dest_dir): for d in dirs: globlist = glob.glob(os.path.join(self.dest_dir,d,"*")) if len(globlist)==0: #empty dir yield (0, -1, _("Removing empty folders..."), None) if not self.dryrun: utils.deltree(os.path.join(self.dest_dir,d)) if self.cancel: yield (100,100,_("Synchronization cancelled"), None) else: yield (100,100,_("Copying Complete"), None) def find_penguintv_lib(): if os.environ.has_key("PENGUINTV_LIB"): return os.environ["PENGUINTV_LIB"] for d in sys.path: sd = os.path.join(d, 'penguintv') if os.path.isdir(sd): return sd print sys.argv[0] h, t = os.path.split(os.path.split(os.path.abspath(sys.argv[0]))[0]) if t == 'bin': libdir = os.path.join(h, 'lib') fp = os.path.join(libdir, 'penguintv') if os.path.isdir(fp): return libdir raise "FileNotFoundError", "couldn't find penguintv library dir" if __name__ == '__main__': # Here starts the dynamic part of the program dest_dir = "" delete = False dryrun = False audio = False move = False opts, args = getopt.getopt(sys.argv[1:], "andmp:","path=") for o, a in opts: if o == "-d": delete = True elif o == "-m": move = True elif o == "-n": print "Dry Run" dryrun = 
True elif o in ("-p", "--path"): dest_dir = a elif o == "-a": audio = True if dest_dir=="": print """ ptv_sync.py (-n) (-d) (-m) -p [destination]: Synchronizes a penguintv media directory with another directory. It doesn't just copy the files, however, it builds a different directory tree based on the feed name instead of the date of download. Options: -p or --path= Set the destination folder. This option must be set. -d Use to delete files on the remote end that don't exist in the penguintv media directory -m Move, don't copy. Deletes media from the penguintv database after copy is complete. -n Dry run. Demonstrates what would happen, but doesn't perform any copy or delete actions. -a Audio mode. Copy only audio files""" sys.exit(1) s = ptv_sync(dest_dir, delete, move, audio, dryrun) last_message = "" for item in s.sync_gen(): if item[2] != last_message: print item[2] last_message = item[2] PenguinTV-4.2.0/penguintv/ptvbittorrent/0000755000000000000000000000000011450514774015237 5ustar PenguinTV-4.2.0/penguintv/ptvbittorrent/Downloader.py0000644000000000000000000004250610646750246017720 0ustar # Written by Bram Cohen # see LICENSE.txt for license information from CurrentRateMeasure import Measure from random import shuffle from time import time from bitfield import Bitfield class SingleDownload: def __init__(self, downloader, connection): self.downloader = downloader self.connection = connection self.choked = True self.interested = False self.active_requests = [] self.measure = Measure(downloader.max_rate_period) self.have = Bitfield(downloader.numpieces) self.last = 0 self.example_interest = None def disconnected(self): self.downloader.downloads.remove(self) for i in xrange(len(self.have)): if self.have[i]: self.downloader.picker.lost_have(i) self._letgo() def _letgo(self): if not self.active_requests: return if self.downloader.storage.is_endgame(): self.active_requests = [] return lost = [] for index, begin, length in self.active_requests: self.downloader.storage.request_lost(index, begin, length) if index not in lost: lost.append(index) self.active_requests = [] ds = [d for d in self.downloader.downloads if not d.choked] shuffle(ds) for d in ds: d._request_more(lost) for d in self.downloader.downloads: if d.choked and not d.interested: for l in lost: if d.have[l] and self.downloader.storage.do_I_have_requests(l): d.interested = True d.connection.send_interested() break def got_choke(self): if not self.choked: self.choked = True self._letgo() def got_unchoke(self): if self.choked: self.choked = False if self.interested: self._request_more() def is_choked(self): return self.choked def is_interested(self): return self.interested def got_piece(self, index, begin, piece): try: self.active_requests.remove((index, begin, len(piece))) except ValueError: return False if self.downloader.storage.is_endgame(): self.downloader.all_requests.remove((index, begin, len(piece))) self.last = time() self.measure.update_rate(len(piece)) self.downloader.measurefunc(len(piece)) self.downloader.downmeasure.update_rate(len(piece)) if not self.downloader.storage.piece_came_in(index, begin, piece): if self.downloader.storage.is_endgame(): while self.downloader.storage.do_I_have_requests(index): nb, nl = self.downloader.storage.new_request(index) self.downloader.all_requests.append((index, nb, nl)) for d in self.downloader.downloads: d.fix_download_endgame() return False self.downloader.picker.bump(index) ds = [d for d in self.downloader.downloads if not d.choked] shuffle(ds) for d in ds: d._request_more([index]) 
return False if self.downloader.storage.do_I_have(index): self.downloader.picker.complete(index) if self.downloader.storage.is_endgame(): for d in self.downloader.downloads: if d is not self and d.interested: if d.choked: d.fix_download_endgame() else: try: d.active_requests.remove((index, begin, len(piece))) except ValueError: continue d.connection.send_cancel(index, begin, len(piece)) d.fix_download_endgame() self._request_more() if self.downloader.picker.am_I_complete(): for d in [i for i in self.downloader.downloads if i.have.numfalse == 0]: d.connection.close() return self.downloader.storage.do_I_have(index) def _want(self, index): return self.have[index] and self.downloader.storage.do_I_have_requests(index) def _request_more(self, indices = None): assert not self.choked if len(self.active_requests) == self.downloader.backlog: return if self.downloader.storage.is_endgame(): self.fix_download_endgame() return lost_interests = [] while len(self.active_requests) < self.downloader.backlog: if indices is None: interest = self.downloader.picker.next(self._want, self.have.numfalse == 0) else: interest = None for i in indices: if self.have[i] and self.downloader.storage.do_I_have_requests(i): interest = i break if interest is None: break if not self.interested: self.interested = True self.connection.send_interested() self.example_interest = interest begin, length = self.downloader.storage.new_request(interest) self.downloader.picker.requested(interest, self.have.numfalse == 0) self.active_requests.append((interest, begin, length)) self.connection.send_request(interest, begin, length) if not self.downloader.storage.do_I_have_requests(interest): lost_interests.append(interest) if not self.active_requests and self.interested: self.interested = False self.connection.send_not_interested() if lost_interests: for d in self.downloader.downloads: if d.active_requests or not d.interested: continue if d.example_interest is not None and self.downloader.storage.do_I_have_requests(d.example_interest): continue for lost in lost_interests: if d.have[lost]: break else: continue interest = self.downloader.picker.next(d._want, d.have.numfalse == 0) if interest is None: d.interested = False d.connection.send_not_interested() else: d.example_interest = interest if self.downloader.storage.is_endgame(): self.downloader.all_requests = [] for d in self.downloader.downloads: self.downloader.all_requests.extend(d.active_requests) for d in self.downloader.downloads: d.fix_download_endgame() def fix_download_endgame(self): want = [a for a in self.downloader.all_requests if self.have[a[0]] and a not in self.active_requests] if self.interested and not self.active_requests and not want: self.interested = False self.connection.send_not_interested() return if not self.interested and want: self.interested = True self.connection.send_interested() if self.choked: return shuffle(want) del want[self.downloader.backlog - len(self.active_requests):] self.active_requests.extend(want) for piece, begin, length in want: self.connection.send_request(piece, begin, length) def got_have(self, index): if self.have[index]: return self.have[index] = True self.downloader.picker.got_have(index) if self.downloader.picker.am_I_complete() and self.have.numfalse == 0: self.connection.close() return if self.downloader.storage.is_endgame(): self.fix_download_endgame() elif self.downloader.storage.do_I_have_requests(index): if not self.choked: self._request_more([index]) else: if not self.interested: self.interested = True 
self.connection.send_interested() def got_have_bitfield(self, have): self.have = have for i in xrange(len(self.have)): if self.have[i]: self.downloader.picker.got_have(i) if self.downloader.picker.am_I_complete() and self.have.numfalse == 0: self.connection.close() return if self.downloader.storage.is_endgame(): for piece, begin, length in self.downloader.all_requests: if self.have[piece]: self.interested = True self.connection.send_interested() return for i in xrange(len(self.have)): if self.have[i] and self.downloader.storage.do_I_have_requests(i): self.interested = True self.connection.send_interested() return def get_rate(self): return self.measure.get_rate() def is_snubbed(self): return time() - self.last > self.downloader.snub_time class Downloader: def __init__(self, storage, picker, backlog, max_rate_period, numpieces, downmeasure, snub_time, measurefunc = lambda x: None): self.storage = storage self.picker = picker self.backlog = backlog self.max_rate_period = max_rate_period self.downmeasure = downmeasure self.numpieces = numpieces self.snub_time = snub_time self.measurefunc = measurefunc self.downloads = [] def make_download(self, connection): self.downloads.append(SingleDownload(self, connection)) return self.downloads[-1] class DummyPicker: def __init__(self, num, r): self.stuff = range(num) self.r = r def next(self, wantfunc, seed): for i in self.stuff: if wantfunc(i): return i return None def lost_have(self, pos): self.r.append('lost have') def got_have(self, pos): self.r.append('got have') def requested(self, pos, seed): self.r.append('requested') def complete(self, pos): self.stuff.remove(pos) self.r.append('complete') def am_I_complete(self): return False def bump(self, i): pass class DummyStorage: def __init__(self, remaining, have_endgame = False, numpieces = 1): self.remaining = remaining self.active = [[] for i in xrange(numpieces)] self.endgame = False self.have_endgame = have_endgame def do_I_have_requests(self, index): return self.remaining[index] != [] def request_lost(self, index, begin, length): x = (begin, length) self.active[index].remove(x) self.remaining[index].append(x) self.remaining[index].sort() def piece_came_in(self, index, begin, piece): self.active[index].remove((begin, len(piece))) return True def do_I_have(self, index): return (self.remaining[index] == [] and self.active[index] == []) def new_request(self, index): x = self.remaining[index].pop() for i in self.remaining: if i: break else: self.endgame = True self.active[index].append(x) self.active[index].sort() return x def is_endgame(self): return self.have_endgame and self.endgame class DummyConnection: def __init__(self, events): self.events = events def send_interested(self): self.events.append('interested') def send_not_interested(self): self.events.append('not interested') def send_request(self, index, begin, length): self.events.append(('request', index, begin, length)) def send_cancel(self, index, begin, length): self.events.append(('cancel', index, begin, length)) def test_stops_at_backlog(): ds = DummyStorage([[(0, 2), (2, 2), (4, 2), (6, 2)]]) events = [] d = Downloader(ds, DummyPicker(len(ds.remaining), events), 2, 15, 1, Measure(15), 10) sd = d.make_download(DummyConnection(events)) assert events == [] assert ds.remaining == [[(0, 2), (2, 2), (4, 2), (6, 2)]] assert ds.active == [[]] sd.got_have_bitfield(Bitfield(1, chr(0x80))) assert events == ['got have', 'interested'] del events[:] assert ds.remaining == [[(0, 2), (2, 2), (4, 2), (6, 2)]] assert ds.active == [[]] sd.got_unchoke() 
assert events == ['requested', ('request', 0, 6, 2), 'requested', ('request', 0, 4, 2)] del events[:] assert ds.remaining == [[(0, 2), (2, 2)]] assert ds.active == [[(4, 2), (6, 2)]] sd.got_piece(0, 4, 'ab') assert events == ['requested', ('request', 0, 2, 2)] del events[:] assert ds.remaining == [[(0, 2)]] assert ds.active == [[(2, 2), (6, 2)]] def test_got_have_single(): ds = DummyStorage([[(0, 2)]]) events = [] d = Downloader(ds, DummyPicker(len(ds.remaining), events), 2, 15, 1, Measure(15), 10) sd = d.make_download(DummyConnection(events)) assert events == [] assert ds.remaining == [[(0, 2)]] assert ds.active == [[]] sd.got_unchoke() assert events == [] assert ds.remaining == [[(0, 2)]] assert ds.active == [[]] sd.got_have(0) assert events == ['got have', 'interested', 'requested', ('request', 0, 0, 2)] del events[:] assert ds.remaining == [[]] assert ds.active == [[(0, 2)]] sd.disconnected() assert events == ['lost have'] def test_choke_clears_active(): ds = DummyStorage([[(0, 2)]]) events = [] d = Downloader(ds, DummyPicker(len(ds.remaining), events), 2, 15, 1, Measure(15), 10) sd1 = d.make_download(DummyConnection(events)) sd2 = d.make_download(DummyConnection(events)) assert events == [] assert ds.remaining == [[(0, 2)]] assert ds.active == [[]] sd1.got_unchoke() sd1.got_have(0) assert events == ['got have', 'interested', 'requested', ('request', 0, 0, 2)] del events[:] assert ds.remaining == [[]] assert ds.active == [[(0, 2)]] sd2.got_unchoke() sd2.got_have(0) assert events == ['got have'] del events[:] assert ds.remaining == [[]] assert ds.active == [[(0, 2)]] sd1.got_choke() assert events == ['interested', 'requested', ('request', 0, 0, 2), 'not interested'] del events[:] assert ds.remaining == [[]] assert ds.active == [[(0, 2)]] sd2.got_piece(0, 0, 'ab') assert events == ['complete', 'not interested'] del events[:] assert ds.remaining == [[]] assert ds.active == [[]] def test_endgame(): ds = DummyStorage([[(0, 2)], [(0, 2)], [(0, 2)]], True, 3) events = [] d = Downloader(ds, DummyPicker(len(ds.remaining), events), 10, 15, 3, Measure(15), 10) ev1 = [] ev2 = [] ev3 = [] ev4 = [] sd1 = d.make_download(DummyConnection(ev1)) sd2 = d.make_download(DummyConnection(ev2)) sd3 = d.make_download(DummyConnection(ev3)) sd1.got_unchoke() sd1.got_have(0) assert ev1 == ['interested', ('request', 0, 0, 2)] del ev1[:] sd2.got_unchoke() sd2.got_have(0) sd2.got_have(1) assert ev2 == ['interested', ('request', 1, 0, 2)] del ev2[:] sd3.got_unchoke() sd3.got_have(0) sd3.got_have(1) sd3.got_have(2) assert (ev3 == ['interested', ('request', 2, 0, 2), ('request', 0, 0, 2), ('request', 1, 0, 2)] or ev3 == ['interested', ('request', 2, 0, 2), ('request', 1, 0, 2), ('request', 0, 0, 2)]) del ev3[:] assert ev2 == [('request', 0, 0, 2)] del ev2[:] sd2.got_piece(0, 0, 'ab') assert ev1 == [('cancel', 0, 0, 2), 'not interested'] del ev1[:] assert ev2 == [] assert ev3 == [('cancel', 0, 0, 2)] del ev3[:] sd3.got_choke() assert ev1 == [] assert ev2 == [] assert ev3 == [] sd3.got_unchoke() assert (ev3 == [('request', 2, 0, 2), ('request', 1, 0, 2)] or ev3 == [('request', 1, 0, 2), ('request', 2, 0, 2)]) del ev3[:] assert ev1 == [] assert ev2 == [] sd4 = d.make_download(DummyConnection(ev4)) sd4.got_have_bitfield([True, True, True]) assert ev4 == ['interested'] del ev4[:] sd4.got_unchoke() assert (ev4 == [('request', 2, 0, 2), ('request', 1, 0, 2)] or ev4 == [('request', 1, 0, 2), ('request', 2, 0, 2)]) assert ev1 == [] assert ev2 == [] assert ev3 == [] def test_stops_at_backlog_endgame(): ds = DummyStorage([[(2, 
2), (0, 2)], [(2, 2), (0, 2)], [(0, 2)]], True, 3) events = [] d = Downloader(ds, DummyPicker(len(ds.remaining), events), 3, 15, 3, Measure(15), 10) ev1 = [] ev2 = [] ev3 = [] sd1 = d.make_download(DummyConnection(ev1)) sd2 = d.make_download(DummyConnection(ev2)) sd3 = d.make_download(DummyConnection(ev3)) sd1.got_unchoke() sd1.got_have(0) assert ev1 == ['interested', ('request', 0, 0, 2), ('request', 0, 2, 2)] del ev1[:] sd2.got_unchoke() sd2.got_have(0) assert ev2 == [] sd2.got_have(1) assert ev2 == ['interested', ('request', 1, 0, 2), ('request', 1, 2, 2)] del ev2[:] sd3.got_unchoke() sd3.got_have(2) assert (ev2 == [('request', 0, 0, 2)] or ev2 == [('request', 0, 2, 2)]) n = ev2[0][2] del ev2[:] sd1.got_piece(0, n, 'ab') assert ev1 == [] assert ev2 == [('cancel', 0, n, 2), ('request', 0, 2-n, 2)] PenguinTV-4.2.0/penguintv/ptvbittorrent/spewout.py0000644000000000000000000000211110646750246017314 0ustar # This file created for Debian because btdownloadcurses can't # find btdownloadheadless because we rename it. def print_spew(spew): s = StringIO() s.write('\n\n\n') for c in spew: s.write('%20s ' % c['ip']) if c['initiation'] == 'local': s.write('l') else: s.write('r') rate, interested, choked = c['upload'] s.write(' %10s ' % str(int(rate))) if c['is_optimistic_unchoke']: s.write('*') else: s.write(' ') if interested: s.write('i') else: s.write(' ') if choked: s.write('c') else: s.write(' ') rate, interested, choked, snubbed = c['download'] s.write(' %10s ' % str(int(rate))) if interested: s.write('i') else: s.write(' ') if choked: s.write('c') else: s.write(' ') if snubbed: s.write('s') else: s.write(' ') s.write('\n') print s.getvalue() PenguinTV-4.2.0/penguintv/ptvbittorrent/RawServer.py0000644000000000000000000004403310646750246017537 0ustar # Written by Bram Cohen # see LICENSE.txt for license information from bisect import insort import socket from cStringIO import StringIO from traceback import print_exc from errno import EWOULDBLOCK, ENOBUFS try: from select import poll, error, POLLIN, POLLOUT, POLLERR, POLLHUP timemult = 1000 except ImportError: from selectpoll import poll, error, POLLIN, POLLOUT, POLLERR, POLLHUP timemult = 1 from threading import Thread, Event from time import time, sleep import sys from random import randrange all = POLLIN | POLLOUT class SingleSocket: def __init__(self, raw_server, sock, handler): self.raw_server = raw_server self.socket = sock self.handler = handler self.buffer = [] self.last_hit = time() self.fileno = sock.fileno() self.connected = False def get_ip(self): try: return self.socket.getpeername()[0] except socket.error: return 'no connection' def close(self): sock = self.socket self.socket = None self.buffer = [] del self.raw_server.single_sockets[self.fileno] self.raw_server.poll.unregister(sock) sock.close() def shutdown(self, val): self.socket.shutdown(val) def is_flushed(self): return len(self.buffer) == 0 def write(self, s): assert self.socket is not None self.buffer.append(s) if len(self.buffer) == 1: self.try_write() def try_write(self): if self.connected: try: while self.buffer != []: amount = self.socket.send(self.buffer[0]) if amount != len(self.buffer[0]): if amount != 0: self.buffer[0] = self.buffer[0][amount:] break del self.buffer[0] except socket.error, e: code, msg = e if code != EWOULDBLOCK: self.raw_server.dead_from_write.append(self) return if self.buffer == []: self.raw_server.poll.register(self.socket, POLLIN) else: self.raw_server.poll.register(self.socket, all) def default_error_handler(x): print x class RawServer: def 
__init__(self, doneflag, timeout_check_interval, timeout, noisy = True, errorfunc = default_error_handler, maxconnects = 55): self.timeout_check_interval = timeout_check_interval self.timeout = timeout self.poll = poll() # {socket: SingleSocket} self.single_sockets = {} self.dead_from_write = [] self.doneflag = doneflag self.noisy = noisy self.errorfunc = errorfunc self.maxconnects = maxconnects self.funcs = [] self.unscheduled_tasks = [] self.add_task(self.scan_for_timeouts, timeout_check_interval) def add_task(self, func, delay): self.unscheduled_tasks.append((func, delay)) def scan_for_timeouts(self): self.add_task(self.scan_for_timeouts, self.timeout_check_interval) t = time() - self.timeout tokill = [] for s in self.single_sockets.values(): if s.last_hit < t: tokill.append(s) for k in tokill: if k.socket is not None: self._close_socket(k) def bind(self, port, bind = '', reuse = False): self.bindaddr = bind server = socket.socket(socket.AF_INET, socket.SOCK_STREAM) if reuse: server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) server.setblocking(0) try: server.setsockopt(socket.IPPROTO_IP, socket.IP_TOS, 32) except: pass server.bind((bind, port)) server.listen(5) self.poll.register(server, POLLIN) self.server = server def start_connection(self, dns, handler = None): if handler is None: handler = self.handler sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.setblocking(0) try: sock.setsockopt(socket.IPPROTO_IP, socket.IP_TOS, 32) except: pass sock.bind((self.bindaddr, 0)) try: sock.connect_ex(dns) except socket.error: raise except Exception, e: raise socket.error(str(e)) self.poll.register(sock, POLLIN) s = SingleSocket(self, sock, handler) self.single_sockets[sock.fileno()] = s return s def handle_events(self, events): for sock, event in events: if sock == self.server.fileno(): if event & (POLLHUP | POLLERR) != 0: self.poll.unregister(self.server) self.server.close() self.errorfunc('lost server socket') else: try: newsock, addr = self.server.accept() newsock.setblocking(0) if len(self.single_sockets) >= self.maxconnects: newsock.close() continue nss = SingleSocket(self, newsock, self.handler) self.single_sockets[newsock.fileno()] = nss self.poll.register(newsock, POLLIN) self.handler.external_connection_made(nss) except socket.error: sleep(1) else: s = self.single_sockets.get(sock) if s is None: continue s.connected = True if (event & (POLLHUP | POLLERR)) != 0: self._close_socket(s) continue if (event & POLLIN) != 0: try: s.last_hit = time() data = s.socket.recv(100000) if data == '': self._close_socket(s) else: s.handler.data_came_in(s, data) except socket.error, e: code, msg = e if code != EWOULDBLOCK: self._close_socket(s) continue if (event & POLLOUT) != 0 and s.socket is not None and not s.is_flushed(): s.try_write() if s.is_flushed(): s.handler.connection_flushed(s) def pop_unscheduled(self): try: while True: (func, delay) = self.unscheduled_tasks.pop() insort(self.funcs, (time() + delay, func)) except IndexError: pass def listen_forever(self, handler): self.handler = handler try: while not self.doneflag.isSet(): try: self.pop_unscheduled() if len(self.funcs) == 0: period = 2 ** 30 else: period = self.funcs[0][0] - time() if period < 0: period = 0 events = self.poll.poll(period * timemult) if self.doneflag.isSet(): return while len(self.funcs) > 0 and self.funcs[0][0] <= time(): garbage, func = self.funcs[0] del self.funcs[0] try: func() except KeyboardInterrupt: print_exc() return except: if self.noisy: data = StringIO() print_exc(file = data) 
self.errorfunc(data.getvalue()) self._close_dead() self.handle_events(events) if self.doneflag.isSet(): return self._close_dead() except error, e: if self.doneflag.isSet(): return # I can't find a coherent explanation for what the behavior should be here, # and people report conflicting behavior, so I'll just try all the possibilities try: code, msg, desc = e except: try: code, msg = e except: code = ENOBUFS if code == ENOBUFS: self.errorfunc("Have to exit due to the TCP stack flaking out") return except KeyboardInterrupt: print_exc() return except: data = StringIO() print_exc(file = data) self.errorfunc(data.getvalue()) finally: for ss in self.single_sockets.values(): ss.close() self.server.close() def _close_dead(self): while len(self.dead_from_write) > 0: old = self.dead_from_write self.dead_from_write = [] for s in old: if s.socket is not None: self._close_socket(s) def _close_socket(self, s): sock = s.socket.fileno() s.socket.close() self.poll.unregister(sock) del self.single_sockets[sock] s.socket = None s.handler.connection_lost(s) # everything below is for testing class DummyHandler: def __init__(self): self.external_made = [] self.data_in = [] self.lost = [] def external_connection_made(self, s): self.external_made.append(s) def data_came_in(self, s, data): self.data_in.append((s, data)) def connection_lost(self, s): self.lost.append(s) def connection_flushed(self, s): pass def sl(rs, handler, port): rs.bind(port) Thread(target = rs.listen_forever, args = [handler]).start() def loop(rs): x = [] def r(rs = rs, x = x): rs.add_task(x[0], .1) x.append(r) rs.add_task(r, .1) beginport = 5000 + randrange(10000) def test_starting_side_close(): try: fa = Event() fb = Event() da = DummyHandler() sa = RawServer(fa, 100, 100) loop(sa) sl(sa, da, beginport) db = DummyHandler() sb = RawServer(fb, 100, 100) loop(sb) sl(sb, db, beginport + 1) sleep(.5) ca = sa.start_connection(('127.0.0.1', beginport + 1)) sleep(1) assert da.external_made == [] assert da.data_in == [] assert da.lost == [] assert len(db.external_made) == 1 cb = db.external_made[0] del db.external_made[:] assert db.data_in == [] assert db.lost == [] ca.write('aaa') cb.write('bbb') sleep(1) assert da.external_made == [] assert da.data_in == [(ca, 'bbb')] del da.data_in[:] assert da.lost == [] assert db.external_made == [] assert db.data_in == [(cb, 'aaa')] del db.data_in[:] assert db.lost == [] ca.write('ccc') cb.write('ddd') sleep(1) assert da.external_made == [] assert da.data_in == [(ca, 'ddd')] del da.data_in[:] assert da.lost == [] assert db.external_made == [] assert db.data_in == [(cb, 'ccc')] del db.data_in[:] assert db.lost == [] ca.close() sleep(1) assert da.external_made == [] assert da.data_in == [] assert da.lost == [] assert db.external_made == [] assert db.data_in == [] assert db.lost == [cb] del db.lost[:] finally: fa.set() fb.set() def test_receiving_side_close(): try: da = DummyHandler() fa = Event() sa = RawServer(fa, 100, 100) loop(sa) sl(sa, da, beginport + 2) db = DummyHandler() fb = Event() sb = RawServer(fb, 100, 100) loop(sb) sl(sb, db, beginport + 3) sleep(.5) ca = sa.start_connection(('127.0.0.1', beginport + 3)) sleep(1) assert da.external_made == [] assert da.data_in == [] assert da.lost == [] assert len(db.external_made) == 1 cb = db.external_made[0] del db.external_made[:] assert db.data_in == [] assert db.lost == [] ca.write('aaa') cb.write('bbb') sleep(1) assert da.external_made == [] assert da.data_in == [(ca, 'bbb')] del da.data_in[:] assert da.lost == [] assert db.external_made == [] assert 
db.data_in == [(cb, 'aaa')] del db.data_in[:] assert db.lost == [] ca.write('ccc') cb.write('ddd') sleep(1) assert da.external_made == [] assert da.data_in == [(ca, 'ddd')] del da.data_in[:] assert da.lost == [] assert db.external_made == [] assert db.data_in == [(cb, 'ccc')] del db.data_in[:] assert db.lost == [] cb.close() sleep(1) assert da.external_made == [] assert da.data_in == [] assert da.lost == [ca] del da.lost[:] assert db.external_made == [] assert db.data_in == [] assert db.lost == [] finally: fa.set() fb.set() def test_connection_refused(): try: da = DummyHandler() fa = Event() sa = RawServer(fa, 100, 100) loop(sa) sl(sa, da, beginport + 6) sleep(.5) ca = sa.start_connection(('127.0.0.1', beginport + 15)) sleep(1) assert da.external_made == [] assert da.data_in == [] assert da.lost == [ca] del da.lost[:] finally: fa.set() def test_both_close(): try: da = DummyHandler() fa = Event() sa = RawServer(fa, 100, 100) loop(sa) sl(sa, da, beginport + 4) sleep(1) db = DummyHandler() fb = Event() sb = RawServer(fb, 100, 100) loop(sb) sl(sb, db, beginport + 5) sleep(.5) ca = sa.start_connection(('127.0.0.1', beginport + 5)) sleep(1) assert da.external_made == [] assert da.data_in == [] assert da.lost == [] assert len(db.external_made) == 1 cb = db.external_made[0] del db.external_made[:] assert db.data_in == [] assert db.lost == [] ca.write('aaa') cb.write('bbb') sleep(1) assert da.external_made == [] assert da.data_in == [(ca, 'bbb')] del da.data_in[:] assert da.lost == [] assert db.external_made == [] assert db.data_in == [(cb, 'aaa')] del db.data_in[:] assert db.lost == [] ca.write('ccc') cb.write('ddd') sleep(1) assert da.external_made == [] assert da.data_in == [(ca, 'ddd')] del da.data_in[:] assert da.lost == [] assert db.external_made == [] assert db.data_in == [(cb, 'ccc')] del db.data_in[:] assert db.lost == [] ca.close() cb.close() sleep(1) assert da.external_made == [] assert da.data_in == [] assert da.lost == [] assert db.external_made == [] assert db.data_in == [] assert db.lost == [] finally: fa.set() fb.set() def test_normal(): l = [] f = Event() s = RawServer(f, 100, 100) loop(s) sl(s, DummyHandler(), beginport + 7) s.add_task(lambda l = l: l.append('b'), 2) s.add_task(lambda l = l: l.append('a'), 1) s.add_task(lambda l = l: l.append('d'), 4) sleep(1.5) s.add_task(lambda l = l: l.append('c'), 1.5) sleep(3) assert l == ['a', 'b', 'c', 'd'] f.set() def test_catch_exception(): l = [] f = Event() s = RawServer(f, 100, 100, False) loop(s) sl(s, DummyHandler(), beginport + 9) s.add_task(lambda l = l: l.append('b'), 2) s.add_task(lambda: 4/0, 1) sleep(3) assert l == ['b'] f.set() def test_closes_if_not_hit(): try: da = DummyHandler() fa = Event() sa = RawServer(fa, 2, 2) loop(sa) sl(sa, da, beginport + 14) sleep(1) db = DummyHandler() fb = Event() sb = RawServer(fb, 100, 100) loop(sb) sl(sb, db, beginport + 13) sleep(.5) sa.start_connection(('127.0.0.1', beginport + 13)) sleep(1) assert da.external_made == [] assert da.data_in == [] assert da.lost == [] assert len(db.external_made) == 1 del db.external_made[:] assert db.data_in == [] assert db.lost == [] sleep(3.1) assert len(da.lost) == 1 assert len(db.lost) == 1 finally: fa.set() fb.set() def test_does_not_close_if_hit(): try: fa = Event() fb = Event() da = DummyHandler() sa = RawServer(fa, 2, 2) loop(sa) sl(sa, da, beginport + 12) sleep(1) db = DummyHandler() sb = RawServer(fb, 100, 100) loop(sb) sl(sb, db, beginport + 13) sleep(.5) sa.start_connection(('127.0.0.1', beginport + 13)) sleep(1) assert da.external_made == [] 
assert da.data_in == [] assert da.lost == [] assert len(db.external_made) == 1 cb = db.external_made[0] del db.external_made[:] assert db.data_in == [] assert db.lost == [] cb.write('bbb') sleep(.5) assert da.lost == [] assert db.lost == [] finally: fa.set() fb.set() PenguinTV-4.2.0/penguintv/ptvbittorrent/bitfield.py0000644000000000000000000000722510646750246017403 0ustar # Written by Bram Cohen, Uoti Urpala, and John Hoffman # see LICENSE.txt for license information try: True except: True = 1 False = 0 bool = lambda x: not not x try: sum([1]) negsum = lambda a: len(a)-sum(a) except: negsum = lambda a: reduce(lambda x,y: x+(not y), a, 0) def _int_to_booleans(x): r = [] for i in range(8): r.append(bool(x & 0x80)) x <<= 1 return tuple(r) lookup_table = [_int_to_booleans(i) for i in range(256)] reverse_lookup_table = {} for i in xrange(256): reverse_lookup_table[lookup_table[i]] = chr(i) class Bitfield: def __init__(self, length, bitstring = None): self.length = length if bitstring is not None: extra = len(bitstring) * 8 - length if extra < 0 or extra >= 8: raise ValueError t = lookup_table r = [] for c in bitstring: r.extend(t[ord(c)]) if extra > 0: if r[-extra:] != [0] * extra: raise ValueError del r[-extra:] self.array = r self.numfalse = negsum(r) else: self.array = [False] * length self.numfalse = length def __setitem__(self, index, val): val = bool(val) self.numfalse += self.array[index]-val self.array[index] = val def __getitem__(self, index): return self.array[index] def __len__(self): return self.length def tostring(self): booleans = self.array t = reverse_lookup_table s = len(booleans) % 8 r = [ t[tuple(booleans[x:x+8])] for x in xrange(0, len(booleans)-s, 8) ] if s: r += t[tuple(booleans[-s:] + ([0] * (8-s)))] return ''.join(r) def complete(self): return not self.numfalse def test_bitfield(): try: x = Bitfield(7, 'ab') assert False except ValueError: pass try: x = Bitfield(7, 'ab') assert False except ValueError: pass try: x = Bitfield(9, 'abc') assert False except ValueError: pass try: x = Bitfield(0, 'a') assert False except ValueError: pass try: x = Bitfield(1, '') assert False except ValueError: pass try: x = Bitfield(7, '') assert False except ValueError: pass try: x = Bitfield(8, '') assert False except ValueError: pass try: x = Bitfield(9, 'a') assert False except ValueError: pass try: x = Bitfield(7, chr(1)) assert False except ValueError: pass try: x = Bitfield(9, chr(0) + chr(0x40)) assert False except ValueError: pass assert Bitfield(0, '').tostring() == '' assert Bitfield(1, chr(0x80)).tostring() == chr(0x80) assert Bitfield(7, chr(0x02)).tostring() == chr(0x02) assert Bitfield(8, chr(0xFF)).tostring() == chr(0xFF) assert Bitfield(9, chr(0) + chr(0x80)).tostring() == chr(0) + chr(0x80) x = Bitfield(1) assert x.numfalse == 1 x[0] = 1 assert x.numfalse == 0 x[0] = 1 assert x.numfalse == 0 assert x.tostring() == chr(0x80) x = Bitfield(7) assert len(x) == 7 x[6] = 1 assert x.numfalse == 6 assert x.tostring() == chr(0x02) x = Bitfield(8) x[7] = 1 assert x.tostring() == chr(1) x = Bitfield(9) x[8] = 1 assert x.numfalse == 8 assert x.tostring() == chr(0) + chr(0x80) x = Bitfield(8, chr(0xC4)) assert len(x) == 8 assert x.numfalse == 5 assert x.tostring() == chr(0xC4) PenguinTV-4.2.0/penguintv/ptvbittorrent/PiecePicker.py0000644000000000000000000001162610646750246020004 0ustar # Written by Bram Cohen # see LICENSE.txt for license information from random import randrange, shuffle, choice class PiecePicker: def __init__(self, numpieces, rarest_first_cutoff = 1): 
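# (Editor's illustration -- not part of the original file. PiecePicker
# buckets pieces by how many peers advertise them: self.interests[n]
# holds the pieces seen n times, so next() can hand out the rarest
# wanted piece once `rarest_first_cutoff` pieces have been completed;
# before that it picks at random from self.scrambled.)
def _example_rarest_first():
    p = PiecePicker(3)
    p.got_have(0); p.got_have(0)    # piece 0 advertised by two peers
    p.got_have(1)                   # piece 1 advertised by one peer
    p.complete(2)                   # one piece done -> past the cutoff
    return p.next(lambda i: True)   # -> 1, the rarest piece we still want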
self.rarest_first_cutoff = rarest_first_cutoff self.numpieces = numpieces self.interests = [range(numpieces)] self.pos_in_interests = range(numpieces) self.numinterests = [0] * numpieces self.started = [] self.seedstarted = [] self.numgot = 0 self.scrambled = range(numpieces) shuffle(self.scrambled) def got_have(self, piece): if self.numinterests[piece] is None: return numint = self.numinterests[piece] if numint == len(self.interests) - 1: self.interests.append([]) self.numinterests[piece] += 1 self._shift_over(piece, self.interests[numint], self.interests[numint + 1]) def lost_have(self, piece): if self.numinterests[piece] is None: return numint = self.numinterests[piece] self.numinterests[piece] -= 1 self._shift_over(piece, self.interests[numint], self.interests[numint - 1]) def _shift_over(self, piece, l1, l2): p = self.pos_in_interests[piece] l1[p] = l1[-1] self.pos_in_interests[l1[-1]] = p del l1[-1] newp = randrange(len(l2) + 1) if newp == len(l2): self.pos_in_interests[piece] = len(l2) l2.append(piece) else: old = l2[newp] self.pos_in_interests[old] = len(l2) l2.append(old) l2[newp] = piece self.pos_in_interests[piece] = newp def requested(self, piece, seed = False): if piece not in self.started: self.started.append(piece) if seed and piece not in self.seedstarted: self.seedstarted.append(piece) def complete(self, piece): assert self.numinterests[piece] is not None self.numgot += 1 l = self.interests[self.numinterests[piece]] p = self.pos_in_interests[piece] l[p] = l[-1] self.pos_in_interests[l[-1]] = p del l[-1] self.numinterests[piece] = None try: self.started.remove(piece) self.seedstarted.remove(piece) except ValueError: pass def next(self, havefunc, seed = False): bests = None bestnum = 2 ** 30 if seed: s = self.seedstarted else: s = self.started for i in s: if havefunc(i): if self.numinterests[i] < bestnum: bests = [i] bestnum = self.numinterests[i] elif self.numinterests[i] == bestnum: bests.append(i) if bests: return choice(bests) if self.numgot < self.rarest_first_cutoff: for i in self.scrambled: if havefunc(i): return i return None for i in xrange(1, min(bestnum, len(self.interests))): for j in self.interests[i]: if havefunc(j): return j return None def am_I_complete(self): return self.numgot == self.numpieces def bump(self, piece): l = self.interests[self.numinterests[piece]] pos = self.pos_in_interests[piece] del l[pos] l.append(piece) for i in range(pos,len(l)): self.pos_in_interests[l[i]] = i def test_requested(): p = PiecePicker(9) p.complete(8) p.got_have(0) p.got_have(2) p.got_have(4) p.got_have(6) p.requested(1) p.requested(1) p.requested(3) p.requested(0) p.requested(6) v = _pull(p) assert v[:2] == [1, 3] or v[:2] == [3, 1] assert v[2:4] == [0, 6] or v[2:4] == [6, 0] assert v[4:] == [2, 4] or v[4:] == [4, 2] def test_change_interest(): p = PiecePicker(9) p.complete(8) p.got_have(0) p.got_have(2) p.got_have(4) p.got_have(6) p.lost_have(2) p.lost_have(6) v = _pull(p) assert v == [0, 4] or v == [4, 0] def test_change_interest2(): p = PiecePicker(9) p.complete(8) p.got_have(0) p.got_have(2) p.got_have(4) p.got_have(6) p.lost_have(2) p.lost_have(6) v = _pull(p) assert v == [0, 4] or v == [4, 0] def test_complete(): p = PiecePicker(1) p.got_have(0) p.complete(0) assert _pull(p) == [] p.got_have(0) p.lost_have(0) def test_rarer_in_started_takes_priority(): p = PiecePicker(3) p.complete(2) p.requested(0) p.requested(1) p.got_have(1) p.got_have(0) p.got_have(0) assert _pull(p) == [1, 0] def test_zero(): assert _pull(PiecePicker(0)) == [] def _pull(pp): r = [] def want(p, r 
= r): return p not in r while True: n = pp.next(want) if n is None: break r.append(n) return r PenguinTV-4.2.0/penguintv/ptvbittorrent/selectpoll.py0000644000000000000000000000435710646750246017772 0ustar # Written by Bram Cohen # see LICENSE.txt for license information from select import select, error from time import sleep from types import IntType from bisect import bisect POLLIN = 1 POLLOUT = 2 POLLERR = 8 POLLHUP = 16 class poll: def __init__(self): self.rlist = [] self.wlist = [] def register(self, f, t): if type(f) != IntType: f = f.fileno() if (t & POLLIN) != 0: insert(self.rlist, f) else: remove(self.rlist, f) if (t & POLLOUT) != 0: insert(self.wlist, f) else: remove(self.wlist, f) def unregister(self, f): if type(f) != IntType: f = f.fileno() remove(self.rlist, f) remove(self.wlist, f) def poll(self, timeout = None): if self.rlist != [] or self.wlist != []: r, w, e = select(self.rlist, self.wlist, [], timeout) else: sleep(timeout) return [] result = [] for s in r: result.append((s, POLLIN)) for s in w: result.append((s, POLLOUT)) return result def remove(list, item): i = bisect(list, item) if i > 0 and list[i-1] == item: del list[i-1] def insert(list, item): i = bisect(list, item) if i == 0 or list[i-1] != item: list.insert(i, item) def test_remove(): x = [2, 4, 6] remove(x, 2) assert x == [4, 6] x = [2, 4, 6] remove(x, 4) assert x == [2, 6] x = [2, 4, 6] remove(x, 6) assert x == [2, 4] x = [2, 4, 6] remove(x, 5) assert x == [2, 4, 6] x = [2, 4, 6] remove(x, 1) assert x == [2, 4, 6] x = [2, 4, 6] remove(x, 7) assert x == [2, 4, 6] x = [2, 4, 6] remove(x, 5) assert x == [2, 4, 6] x = [] remove(x, 3) assert x == [] def test_insert(): x = [2, 4] insert(x, 1) assert x == [1, 2, 4] x = [2, 4] insert(x, 3) assert x == [2, 3, 4] x = [2, 4] insert(x, 5) assert x == [2, 4, 5] x = [2, 4] insert(x, 2) assert x == [2, 4] x = [2, 4] insert(x, 4) assert x == [2, 4] x = [2, 3, 4] insert(x, 3) assert x == [2, 3, 4] x = [] insert(x, 3) assert x == [3] PenguinTV-4.2.0/penguintv/ptvbittorrent/zurllib.py0000644000000000000000000001061610646750246017302 0ustar # # zurllib.py # # This is (hopefully) a drop-in for urllib which will request gzip/deflate # compression and then decompress the output if a compressed response is # received while maintaining the API. # # by Robert Stone 2/22/2003 # from urllib import * from urllib2 import * from gzip import GzipFile from StringIO import StringIO from __init__ import version import pprint DEBUG=0 class HTTPContentEncodingHandler(HTTPHandler): """Inherit and add gzip/deflate/etc support to HTTP gets.""" def http_open(self, req): # add the Accept-Encoding header to the request # support gzip encoding (identity is assumed) req.add_header("Accept-Encoding","gzip") req.add_header('User-Agent', 'BitTorrent/' + version) if DEBUG: print "Sending:" print req.headers print "\n" fp = HTTPHandler.http_open(self,req) headers = fp.headers if DEBUG: pprint.pprint(headers.dict) url = fp.url resp = addinfourldecompress(fp, headers, url) # As of Python 2.4 http_open response also has 'code' and 'msg' # members, and HTTPErrorProcessor breaks if they don't exist. if 'code' in dir(fp): resp.code = fp.code if 'msg' in dir(fp): resp.msg = fp.msg return resp class addinfourldecompress(addinfourl): """Do gzip decompression if necessary. Do addinfourl stuff too.""" def __init__(self, fp, headers, url): # we need to do something more sophisticated here to deal with # multiple values? What about other weird crap like q-values? 
PenguinTV-4.2.0/penguintv/ptvbittorrent/zurllib.py0000644000000000000000000001061610646750246017302 0ustar
#
# zurllib.py
#
# This is (hopefully) a drop-in for urllib which will request gzip/deflate
# compression and then decompress the output if a compressed response is
# received while maintaining the API.
#
# by Robert Stone 2/22/2003
#

from urllib import *
from urllib2 import *
from gzip import GzipFile
from StringIO import StringIO
from __init__ import version
import pprint

DEBUG = 0

class HTTPContentEncodingHandler(HTTPHandler):
    """Inherit and add gzip/deflate/etc support to HTTP gets."""
    def http_open(self, req):
        # add the Accept-Encoding header to the request
        # support gzip encoding (identity is assumed)
        req.add_header("Accept-Encoding", "gzip")
        req.add_header('User-Agent', 'BitTorrent/' + version)
        if DEBUG:
            print "Sending:"
            print req.headers
            print "\n"
        fp = HTTPHandler.http_open(self, req)
        headers = fp.headers
        if DEBUG:
            pprint.pprint(headers.dict)
        url = fp.url
        resp = addinfourldecompress(fp, headers, url)
        # As of Python 2.4 http_open response also has 'code' and 'msg'
        # members, and HTTPErrorProcessor breaks if they don't exist.
        if 'code' in dir(fp):
            resp.code = fp.code
        if 'msg' in dir(fp):
            resp.msg = fp.msg
        return resp

class addinfourldecompress(addinfourl):
    """Do gzip decompression if necessary. Do addinfourl stuff too."""
    def __init__(self, fp, headers, url):
        # we need to do something more sophisticated here to deal with
        # multiple values?  What about other weird crap like q-values?
        # basically this only works for the most simplistic case and will
        # break in some other cases, but for now we only care about making
        # this work with the BT tracker so....
        if headers.has_key('content-encoding') and headers['content-encoding'] == 'gzip':
            if DEBUG:
                print "Contents of Content-encoding: " + \
                    headers['Content-encoding'] + "\n"
            self.gzip = 1
            self.rawfp = fp
            fp = GzipStream(fp)
        else:
            self.gzip = 0
        return addinfourl.__init__(self, fp, headers, url)

    def close(self):
        self.fp.close()
        if self.gzip:
            self.rawfp.close()

    def iscompressed(self):
        return self.gzip

class GzipStream(StringIO):
    """Magically decompress a file object.

    This is not the most efficient way to do this but GzipFile() wants
    to seek, etc, which won't work for a stream such as that from a socket.
    So we copy the whole shebang into a StringIO object, decompress that,
    then let people access the decompressed output as a StringIO object.

    The disadvantage is memory use and the advantage is random access.

    Will mess with fixing this later.
    """

    def __init__(self, fp):
        self.fp = fp

        # this is nasty and needs to be fixed at some point
        # copy everything into a StringIO (compressed)
        compressed = StringIO()
        r = fp.read()
        while r:
            compressed.write(r)
            r = fp.read()

        # now, unzip (gz) the StringIO to a string
        compressed.seek(0, 0)
        gz = GzipFile(fileobj = compressed)
        str = ''
        r = gz.read()
        while r:
            str += r
            r = gz.read()

        # close our utility files
        compressed.close()
        gz.close()

        # init our stringio selves with the string
        StringIO.__init__(self, str)
        del str

    def close(self):
        self.fp.close()
        return StringIO.close(self)

def test():
    """Test this module.

    At the moment this is lame.
    """

    print "Running unit tests.\n"

    def printcomp(fp):
        try:
            if fp.iscompressed():
                print "GET was compressed.\n"
            else:
                print "GET was uncompressed.\n"
        except:
            print "no iscompressed function!  this shouldn't happen"

    print "Trying to GET a compressed document...\n"
    fp = urlopen('http://a.scarywater.net/hng/index.shtml')
    print fp.read()
    printcomp(fp)
    fp.close()

    print "Trying to GET an unknown document...\n"
    fp = urlopen('http://www.otaku.org/')
    print fp.read()
    printcomp(fp)
    fp.close()

#
# Install the HTTPContentEncodingHandler that we've defined above.
#
install_opener(build_opener(HTTPContentEncodingHandler))

if __name__ == '__main__':
    test()
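# Example (illustrative sketch, not part of the upstream file): a GzipStream
# round-trip without any network traffic -- compress a string into a StringIO,
# then let GzipStream slurp and decompress it, just as it does for a socket
# response whose Content-Encoding is gzip.
def _example_gzipstream():
    buf = StringIO()
    gz = GzipFile(fileobj = buf, mode = 'wb')
    gz.write('hello tracker')
    gz.close()
    buf.seek(0)
    print GzipStream(buf).read()    # -> 'hello tracker'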
PenguinTV-4.2.0/penguintv/ptvbittorrent/NatCheck.py0000644000000000000000000000513210646750246017274 0ustar
# Written by Bram Cohen
# see LICENSE.txt for license information

from cStringIO import StringIO
from socket import error as socketerror

protocol_name = 'BitTorrent protocol'

# header, reserved, download id, my id, [length, message]

class NatCheck:
    def __init__(self, resultfunc, downloadid, peerid, ip, port, rawserver):
        self.resultfunc = resultfunc
        self.downloadid = downloadid
        self.peerid = peerid
        self.ip = ip
        self.port = port
        self.closed = False
        self.buffer = StringIO()
        self.next_len = 1
        self.next_func = self.read_header_len
        try:
            self.connection = rawserver.start_connection((ip, port), self)
            self.connection.write(chr(len(protocol_name)) + protocol_name +
                (chr(0) * 8) + downloadid)
        except socketerror:
            self.answer(False)
        except IOError:
            self.answer(False)

    def answer(self, result):
        self.closed = True
        try:
            self.connection.close()
        except AttributeError:
            pass
        self.resultfunc(result, self.downloadid, self.peerid, self.ip, self.port)

    def read_header_len(self, s):
        if ord(s) != len(protocol_name):
            return None
        return len(protocol_name), self.read_header

    def read_header(self, s):
        if s != protocol_name:
            return None
        return 8, self.read_reserved

    def read_reserved(self, s):
        return 20, self.read_download_id

    def read_download_id(self, s):
        if s != self.downloadid:
            return None
        return 20, self.read_peer_id

    def read_peer_id(self, s):
        if s != self.peerid:
            return None
        self.answer(True)
        return None

    def data_came_in(self, connection, s):
        while True:
            if self.closed:
                return
            i = self.next_len - self.buffer.tell()
            if i > len(s):
                self.buffer.write(s)
                return
            self.buffer.write(s[:i])
            s = s[i:]
            m = self.buffer.getvalue()
            self.buffer.reset()
            self.buffer.truncate()
            x = self.next_func(m)
            if x is None:
                if not self.closed:
                    self.answer(False)
                return
            self.next_len, self.next_func = x

    def connection_lost(self, connection):
        if not self.closed:
            self.closed = True
            self.resultfunc(False, self.downloadid, self.peerid, self.ip, self.port)

    def connection_flushed(self, connection):
        pass
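# Example (illustrative sketch, not part of the upstream file): the exact
# handshake NatCheck writes, per the wire-format comment above -- a
# length-prefixed protocol string, eight reserved zero bytes, then the
# 20-byte download id ('d' * 20 is a stand-in for a real info hash).
def _example_handshake():
    downloadid = 'd' * 20
    hs = chr(len(protocol_name)) + protocol_name + chr(0) * 8 + downloadid
    assert len(hs) == 1 + len(protocol_name) + 8 + 20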
PenguinTV-4.2.0/penguintv/ptvbittorrent/Encrypter.py0000644000000000000000000004454510646750246017574 0ustar
# Written by Bram Cohen
# see LICENSE.txt for license information

from cStringIO import StringIO
from binascii import b2a_hex
from socket import error as socketerror

protocol_name = 'BitTorrent protocol'

def toint(s):
    return long(b2a_hex(s), 16)

def tobinary(i):
    return (chr(i >> 24) + chr((i >> 16) & 0xFF) +
        chr((i >> 8) & 0xFF) + chr(i & 0xFF))

# header, reserved, download id, my id, [length, message]

class Connection:
    def __init__(self, Encoder, connection, id, is_local):
        self.encoder = Encoder
        self.connection = connection
        self.id = id
        self.locally_initiated = is_local
        self.complete = False
        self.closed = False
        self.buffer = StringIO()
        self.next_len = 1
        self.next_func = self.read_header_len
        if self.locally_initiated:
            connection.write(chr(len(protocol_name)) + protocol_name +
                (chr(0) * 8) + self.encoder.download_id)
            if self.id is not None:
                connection.write(self.encoder.my_id)

    def get_ip(self):
        return self.connection.get_ip()

    def get_id(self):
        return self.id

    def is_locally_initiated(self):
        return self.locally_initiated

    def is_flushed(self):
        return self.connection.is_flushed()

    def read_header_len(self, s):
        if ord(s) != len(protocol_name):
            return None
        return len(protocol_name), self.read_header

    def read_header(self, s):
        if s != protocol_name:
            return None
        return 8, self.read_reserved

    def read_reserved(self, s):
        return 20, self.read_download_id

    def read_download_id(self, s):
        if s != self.encoder.download_id:
            return None
        if not self.locally_initiated:
            self.connection.write(chr(len(protocol_name)) + protocol_name +
                (chr(0) * 8) + self.encoder.download_id + self.encoder.my_id)
        return 20, self.read_peer_id

    def read_peer_id(self, s):
        if not self.id:
            if s == self.encoder.my_id:
                return None
            for v in self.encoder.connections.values():
                if s and v.id == s:
                    return None
            self.id = s
            if self.locally_initiated:
                self.connection.write(self.encoder.my_id)
            else:
                self.encoder.everinc = True
        else:
            if s != self.id:
                return None
        self.complete = True
        self.encoder.connecter.connection_made(self)
        return 4, self.read_len

    def read_len(self, s):
        l = toint(s)
        if l > self.encoder.max_len:
            return None
        return l, self.read_message

    def read_message(self, s):
        if s != '':
            self.encoder.connecter.got_message(self, s)
        return 4, self.read_len

    def read_dead(self, s):
        return None

    def close(self):
        if not self.closed:
            self.connection.close()
            self.sever()

    def sever(self):
        self.closed = True
        del self.encoder.connections[self.connection]
        if self.complete:
            self.encoder.connecter.connection_lost(self)

    def send_message(self, message):
        self.connection.write(tobinary(len(message)) + message)

    def data_came_in(self, s):
        while True:
            if self.closed:
                return
            i = self.next_len - self.buffer.tell()
            if i > len(s):
                self.buffer.write(s)
                return
            self.buffer.write(s[:i])
            s = s[i:]
            m = self.buffer.getvalue()
            self.buffer.reset()
            self.buffer.truncate()
            try:
                x = self.next_func(m)
            except:
                self.next_len, self.next_func = 1, self.read_dead
                raise
            if x is None:
                self.close()
                return
            self.next_len, self.next_func = x

class Encoder:
    def __init__(self, connecter, raw_server, my_id, max_len,
            schedulefunc, keepalive_delay, download_id, max_initiate = 40):
        self.raw_server = raw_server
        self.connecter = connecter
        self.my_id = my_id
        self.max_len = max_len
        self.schedulefunc = schedulefunc
        self.keepalive_delay = keepalive_delay
        self.download_id = download_id
        self.max_initiate = max_initiate
        self.everinc = False
        self.connections = {}
        self.spares = []
        schedulefunc(self.send_keepalives, keepalive_delay)

    def send_keepalives(self):
        self.schedulefunc(self.send_keepalives, self.keepalive_delay)
        for c in self.connections.values():
            if c.complete:
                c.send_message('')

    def start_connection(self, dns, id):
        if id:
            if id == self.my_id:
                return
            for v in self.connections.values():
                if v.id == id:
                    return
        if len(self.connections) >= self.max_initiate:
            if len(self.spares) < self.max_initiate and dns not in self.spares:
                self.spares.append(dns)
            return
        try:
            c = self.raw_server.start_connection(dns)
            self.connections[c] = Connection(self, c, id, True)
        except socketerror:
            pass

    def _start_connection(self, dns, id):
        def foo(self=self, dns=dns, id=id):
            self.start_connection(dns, id)
        self.schedulefunc(foo, 0)

    def got_id(self, connection):
        for v in self.connections.values():
            if connection is not v and connection.id == v.id:
                connection.close()
                return
        self.connecter.connection_made(connection)

    def ever_got_incoming(self):
        return self.everinc

    def external_connection_made(self, connection):
        self.connections[connection] = Connection(self, connection, None, False)

    def connection_flushed(self, connection):
        c = self.connections[connection]
        if c.complete:
            self.connecter.connection_flushed(c)

    def connection_lost(self, connection):
        self.connections[connection].sever()
        while len(self.connections) < self.max_initiate and self.spares:
            self.start_connection(self.spares.pop(), None)

    def data_came_in(self, connection, data):
        self.connections[connection].data_came_in(data)

# everything below is for testing

class DummyConnecter:
    def __init__(self):
        self.log = []
        self.close_next = False

    def connection_made(self, connection):
        self.log.append(('made', connection))

    def connection_lost(self, connection):
        self.log.append(('lost', connection))

    def connection_flushed(self, connection):
        self.log.append(('flushed', connection))

    def got_message(self, connection, message):
        self.log.append(('got', connection, message))
        if self.close_next:
            connection.close()

class DummyRawServer:
    def __init__(self):
        self.connects = []

    def start_connection(self, dns):
        c = DummyRawConnection()
        self.connects.append((dns, c))
        return c

class DummyRawConnection:
    def __init__(self):
        self.closed = False
        self.data = []
        self.flushed = True

    def get_ip(self):
        return 'fake.ip'

    def is_flushed(self):
        return self.flushed

    def write(self, data):
        assert not self.closed
        self.data.append(data)

    def close(self):
        assert not self.closed
        self.closed = True

    def pop(self):
        r = ''.join(self.data)
        del self.data[:]
        return r

def dummyschedule(a, b):
    pass

def test_messages_in_and_out():
    c = DummyConnecter()
    rs = DummyRawServer()
    e = Encoder(c, rs, 'a' * 20, 500, dummyschedule, 30, 'd' * 20)
    c1 = DummyRawConnection()
    e.external_connection_made(c1)
    assert c1.pop() == ''
    assert c.log == []
    assert rs.connects == []
    assert not c1.closed
    e.data_came_in(c1, chr(len(protocol_name)) + protocol_name + \
        chr(0) * 8 + 'd' * 20)
    assert c1.pop() == chr(len(protocol_name)) + protocol_name + \
        chr(0) * 8 + 'd' * 20 + 'a' * 20
    assert c.log == []
    assert rs.connects == []
    assert not c1.closed
    e.data_came_in(c1, 'b' * 20)
    assert c1.pop() == ''
    assert len(c.log) == 1
    assert c.log[0][0] == 'made'
    ch = c.log[0][1]
    del c.log[:]
    assert rs.connects == []
    assert not c1.closed
    assert ch.get_ip() == 'fake.ip'
    ch.send_message('abc')
    assert c1.pop() == chr(0) * 3 + chr(3) + 'abc'
    assert c.log == []
    assert rs.connects == []
    assert not c1.closed
    e.data_came_in(c1, chr(0) * 3 + chr(3) + 'def')
    assert c1.pop() == ''
    assert c.log == [('got', ch, 'def')]
    del c.log[:]
    assert rs.connects == []
    assert not c1.closed

def test_flushed():
    c = DummyConnecter()
    rs = DummyRawServer()
    e = Encoder(c, rs, 'a' * 20, 500, dummyschedule, 30, 'd' * 20)
    c1 = DummyRawConnection()
    e.external_connection_made(c1)
    assert c1.pop() == ''
    assert c.log == []
    assert rs.connects == []
    assert not c1.closed
    e.data_came_in(c1, chr(len(protocol_name)) + protocol_name + \
        chr(0) * 8 + 'd' * 20)
    assert c1.pop() == chr(len(protocol_name)) + protocol_name + \
        chr(0) * 8 + 'd' * 20 + 'a' * 20
    assert c1.pop() == ''
    assert c.log == []
    assert rs.connects == []
    assert not c1.closed
    e.connection_flushed(c1)
    assert c1.pop() == ''
    assert c.log == []
    assert rs.connects == []
    assert not c1.closed
    e.data_came_in(c1, 'b' * 20)
    assert c1.pop() == ''
    assert len(c.log) == 1
    assert c.log[0][0] == 'made'
    ch = c.log[0][1]
    del c.log[:]
    assert rs.connects == []
    assert not c1.closed
    assert ch.is_flushed()
    e.connection_flushed(c1)
    assert c1.pop() == ''
    assert c.log == [('flushed', ch)]
    assert rs.connects == []
    assert not c1.closed
    c1.flushed = False
    assert not ch.is_flushed()

def test_wrong_header_length():
    c = DummyConnecter()
    rs = DummyRawServer()
    e = Encoder(c, rs, 'a' * 20, 500, dummyschedule, 30, 'd' * 20)
    c1 = DummyRawConnection()
    e.external_connection_made(c1)
    assert c1.pop() == ''
    assert c.log == []
    assert rs.connects == []
    assert not c1.closed
    e.data_came_in(c1, chr(5) * 30)
    assert c.log == []
    assert c1.closed

def test_wrong_header():
    c = DummyConnecter()
    rs = DummyRawServer()
    e = Encoder(c, rs, 'a' * 20, 500, dummyschedule, 30, 'd' * 20)
    c1 = DummyRawConnection()
    e.external_connection_made(c1)
    assert c1.pop() == ''
    assert c.log == []
    assert rs.connects == []
    assert not c1.closed
    e.data_came_in(c1, chr(len(protocol_name)) + 'a' * len(protocol_name))
    assert c.log == []
    assert c1.closed

def test_wrong_download_id():
    c = DummyConnecter()
    rs = DummyRawServer()
    e = Encoder(c, rs, 'a' * 20, 500, dummyschedule, 30, 'd' * 20)
    c1 = DummyRawConnection()
    e.external_connection_made(c1)
    assert c1.pop() == ''
    assert c.log == []
    assert rs.connects == []
    assert not c1.closed
    e.data_came_in(c1, chr(len(protocol_name)) + protocol_name +
        chr(0) * 8 + 'e' * 20)
    assert c1.pop() == ''
    assert c.log == []
    assert c1.closed

def test_wrong_other_id():
    c = DummyConnecter()
    rs = DummyRawServer()
    e = Encoder(c, rs, 'a' * 20, 500, dummyschedule, 30, 'd' * 20)
    e.start_connection('dns', 'o' * 20)
    assert c.log == []
    assert len(rs.connects) == 1
    assert rs.connects[0][0] == 'dns'
    c1 = rs.connects[0][1]
    del rs.connects[:]
    assert c1.pop() == chr(len(protocol_name)) + protocol_name + \
        chr(0) * 8 + 'd' * 20 + 'a' * 20
    assert not c1.closed
    e.data_came_in(c1, chr(len(protocol_name)) + protocol_name +
        chr(0) * 8 + 'd' * 20 + 'b' * 20)
    assert c.log == []
    assert c1.closed

def test_over_max_len():
    c = DummyConnecter()
    rs = DummyRawServer()
    e = Encoder(c, rs, 'a' * 20, 500, dummyschedule, 30, 'd' * 20)
    c1 = DummyRawConnection()
    e.external_connection_made(c1)
    assert c1.pop() == ''
    assert c.log == []
    assert rs.connects == []
    assert not c1.closed
    e.data_came_in(c1, chr(len(protocol_name)) + protocol_name +
        chr(0) * 8 + 'd' * 20 + 'o' * 20)
    assert c1.pop() == chr(len(protocol_name)) + protocol_name + \
        chr(0) * 8 + 'd' * 20 + 'a' * 20
    assert len(c.log) == 1 and c.log[0][0] == 'made'
    ch = c.log[0][1]
    del c.log[:]
    assert not c1.closed
    e.data_came_in(c1, chr(1) + chr(0) * 3)
    assert c.log == [('lost', ch)]
    assert c1.closed

def test_keepalive():
    s = []
    def sched(interval, thing, s = s):
        s.append((interval, thing))
    c = DummyConnecter()
    rs = DummyRawServer()
    e = Encoder(c, rs, 'a' * 20, 500, sched, 30, 'd' * 20)
    assert len(s) == 1
    assert s[0][1] == 30
    kfunc = s[0][0]
    del s[:]
    c1 = DummyRawConnection()
    e.external_connection_made(c1)
    assert c1.pop() == ''
    assert c.log == []
    assert not c1.closed
    kfunc()
    assert c1.pop() == ''
    assert c.log == []
    assert not c1.closed
    assert s == [(kfunc, 30)]
    del s[:]
    e.data_came_in(c1, chr(len(protocol_name)) + protocol_name +
        chr(0) * 8 + 'd' * 20 + 'o' * 20)
    assert len(c.log) == 1 and c.log[0][0] == 'made'
    del c.log[:]
    assert c1.pop() == chr(len(protocol_name)) + protocol_name + \
        chr(0) * 8 + 'd' * 20 + 'a' * 20
    assert not c1.closed
    kfunc()
    assert c1.pop() == chr(0) * 4
    assert c.log == []
    assert not c1.closed

def test_swallow_keepalive():
    c = DummyConnecter()
    rs = DummyRawServer()
    e = Encoder(c, rs, 'a' * 20, 500, dummyschedule, 30, 'd' * 20)
    c1 = DummyRawConnection()
    e.external_connection_made(c1)
    assert c1.pop() == ''
    assert c.log == []
    assert rs.connects == []
    assert not c1.closed
    e.data_came_in(c1, chr(len(protocol_name)) + protocol_name +
        chr(0) * 8 + 'd' * 20 + 'o' * 20)
    assert c1.pop() == chr(len(protocol_name)) + protocol_name + \
        chr(0) * 8 + 'd' * 20 + 'a' * 20
    assert len(c.log) == 1 and c.log[0][0] == 'made'
    del c.log[:]
    assert not c1.closed
    e.data_came_in(c1, chr(0) * 4)
    assert c.log == []
    assert not c1.closed

def test_local_close():
    c = DummyConnecter()
    rs = DummyRawServer()
    e = Encoder(c, rs, 'a' * 20, 500, dummyschedule, 30, 'd' * 20)
    c1 = DummyRawConnection()
    e.external_connection_made(c1)
    assert c1.pop() == ''
    assert c.log == []
    assert rs.connects == []
    assert not c1.closed
    e.data_came_in(c1, chr(len(protocol_name)) + protocol_name +
        chr(0) * 8 + 'd' * 20 + 'o' * 20)
    assert c1.pop() == chr(len(protocol_name)) + protocol_name + \
        chr(0) * 8 + 'd' * 20 + 'a' * 20
    assert len(c.log) == 1 and c.log[0][0] == 'made'
    ch = c.log[0][1]
    del c.log[:]
    assert not c1.closed
    ch.close()
    assert c.log == [('lost', ch)]
    del c.log[:]
    assert c1.closed

def test_local_close_in_message_receive():
    c = DummyConnecter()
    rs = DummyRawServer()
    e = Encoder(c, rs, 'a' * 20, 500, dummyschedule, 30, 'd' * 20)
    c1 = DummyRawConnection()
    e.external_connection_made(c1)
    assert c1.pop() == ''
    assert c.log == []
    assert rs.connects == []
    assert not c1.closed
    e.data_came_in(c1, chr(len(protocol_name)) + protocol_name +
        chr(0) * 8 + 'd' * 20 + 'o' * 20)
    assert c1.pop() == chr(len(protocol_name)) + protocol_name + \
        chr(0) * 8 + 'd' * 20 + 'a' * 20
    assert len(c.log) == 1 and c.log[0][0] == 'made'
    ch = c.log[0][1]
    del c.log[:]
    assert not c1.closed
    c.close_next = True
    e.data_came_in(c1, chr(0) * 3 + chr(4) + 'abcd')
    assert c.log == [('got', ch, 'abcd'), ('lost', ch)]
    assert c1.closed

def test_remote_close():
    c = DummyConnecter()
    rs = DummyRawServer()
    e = Encoder(c, rs, 'a' * 20, 500, dummyschedule, 30, 'd' * 20)
    c1 = DummyRawConnection()
    e.external_connection_made(c1)
    assert c1.pop() == ''
    assert c.log == []
    assert rs.connects == []
    assert not c1.closed
    e.data_came_in(c1, chr(len(protocol_name)) + protocol_name +
        chr(0) * 8 + 'd' * 20 + 'o' * 20)
    assert c1.pop() == chr(len(protocol_name)) + protocol_name + \
        chr(0) * 8 + 'd' * 20 + 'a' * 20
    assert len(c.log) == 1 and c.log[0][0] == 'made'
    ch = c.log[0][1]
    del c.log[:]
    assert not c1.closed
    e.connection_lost(c1)
    assert c.log == [('lost', ch)]
    assert not c1.closed

def test_partial_data_in():
    c = DummyConnecter()
    rs = DummyRawServer()
    e = Encoder(c, rs, 'a' * 20, 500, dummyschedule, 30, 'd' * 20)
    c1 = DummyRawConnection()
    e.external_connection_made(c1)
    assert c1.pop() == ''
    assert c.log == []
    assert rs.connects == []
    assert not c1.closed
    e.data_came_in(c1, chr(len(protocol_name)) + protocol_name +
        chr(0) * 4)
    e.data_came_in(c1, chr(0) * 4 + 'd' * 20 + 'c' * 10)
    e.data_came_in(c1, 'c' * 10)
    assert c1.pop() == chr(len(protocol_name)) + protocol_name + \
        chr(0) * 8 + 'd' * 20 + 'a' * 20
    assert len(c.log) == 1 and c.log[0][0] == 'made'
    del c.log[:]
    assert not c1.closed

def test_ignore_connect_of_extant():
    c = DummyConnecter()
    rs = DummyRawServer()
    e = Encoder(c, rs, 'a' * 20, 500, dummyschedule, 30, 'd' * 20)
    c1 = DummyRawConnection()
    e.external_connection_made(c1)
    assert c1.pop() == ''
    assert c.log == []
    assert rs.connects == []
    assert not c1.closed
    e.data_came_in(c1, chr(len(protocol_name)) + protocol_name +
        chr(0) * 8 + 'd' * 20 + 'o' * 20)
    assert c1.pop() == chr(len(protocol_name)) + protocol_name + \
        chr(0) * 8 + 'd' * 20 + 'a' * 20
    assert len(c.log) == 1 and c.log[0][0] == 'made'
    del c.log[:]
    assert not c1.closed
    e.start_connection('dns', 'o' * 20)
    assert c.log == []
    assert rs.connects == []
    assert not c1.closed

def test_ignore_connect_to_self():
    c = DummyConnecter()
    rs = DummyRawServer()
    e = Encoder(c, rs, 'a' * 20, 500, dummyschedule, 30, 'd' * 20)
    c1 = DummyRawConnection()
    e.start_connection('dns', 'a' * 20)
    assert c.log == []
    assert rs.connects == []
    assert not c1.closed

def test_conversion():
    assert toint(tobinary(50000)) == 50000
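# Example (illustrative sketch, not part of the upstream file): a condensed
# version of the handshake the tests above walk through step by step, using
# the dummy test doubles that ship in this file.
def _example_encoder_handshake():
    c = DummyConnecter()
    rs = DummyRawServer()
    e = Encoder(c, rs, 'a' * 20, 500, dummyschedule, 30, 'd' * 20)
    conn = DummyRawConnection()
    e.external_connection_made(conn)
    # feed a complete remote handshake: header, reserved, download id, peer id
    e.data_came_in(conn, chr(len(protocol_name)) + protocol_name +
        chr(0) * 8 + 'd' * 20 + 'o' * 20)
    assert c.log[0][0] == 'made'    # connecter was told about the new peer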
PenguinTV-4.2.0/penguintv/ptvbittorrent/parseargs.py0000644000000000000000000000677310646750246017613 0ustar
# Written by Bill Bumgarner and Bram Cohen
# see LICENSE.txt for license information

from types import *
from cStringIO import StringIO

def formatDefinitions(options, COLS):
    s = StringIO()
    indent = " " * 10
    width = COLS - 11
    if width < 15:
        width = COLS - 2
        indent = " "
    for (longname, default, doc) in options:
        s.write('--' + longname + ' <arg>\n')
        if default is not None:
            doc += ' (defaults to ' + repr(default) + ')'
        i = 0
        for word in doc.split():
            if i == 0:
                s.write(indent + word)
                i = len(word)
            elif i + len(word) >= width:
                s.write('\n' + indent + word)
                i = len(word)
            else:
                s.write(' ' + word)
                i += len(word) + 1
        s.write('\n\n')
    return s.getvalue()

def usage(str):
    raise ValueError(str)

def parseargs(argv, options, minargs = None, maxargs = None):
    config = {}
    longkeyed = {}
    for option in options:
        longname, default, doc = option
        longkeyed[longname] = option
        config[longname] = default
    options = []
    args = []
    pos = 0
    while pos < len(argv):
        if argv[pos][:2] != '--':
            args.append(argv[pos])
            pos += 1
        else:
            if pos == len(argv) - 1:
                usage('parameter passed in at end with no value')
            key, value = argv[pos][2:], argv[pos+1]
            pos += 2
            if not longkeyed.has_key(key):
                usage('unknown key --' + key)
            longname, default, doc = longkeyed[key]
            try:
                t = type(config[longname])
                if t is NoneType or t is StringType:
                    config[longname] = value
                elif t in (IntType, LongType):
                    config[longname] = long(value)
                elif t is FloatType:
                    config[longname] = float(value)
                else:
                    assert 0
            except ValueError, e:
                usage('wrong format of --%s - %s' % (key, str(e)))
    for key, value in config.items():
        if value is None:
            usage("Option --%s is required." % key)
    if minargs is not None and len(args) < minargs:
        usage("Must supply at least %d args." % minargs)
    if maxargs is not None and len(args) > maxargs:
        usage("Too many args - %d max." % maxargs)
    return (config, args)

def test_parseargs():
    assert parseargs(('d', '--a', 'pq', 'e', '--b', '3', '--c', '4.5', 'f'),
        (('a', 'x', ''), ('b', 1, ''), ('c', 2.3, ''))) == (
        {'a': 'pq', 'b': 3, 'c': 4.5}, ['d', 'e', 'f'])
    assert parseargs([], [('a', 'x', '')]) == ({'a': 'x'}, [])
    assert parseargs(['--a', 'x', '--a', 'y'], [('a', '', '')]) == ({'a': 'y'}, [])
    try:
        parseargs([], [('a', 'x', '')])
    except ValueError:
        pass
    try:
        parseargs(['--a', 'x'], [])
    except ValueError:
        pass
    try:
        parseargs(['--a'], [('a', 'x', '')])
    except ValueError:
        pass
    try:
        parseargs([], [], 1, 2)
    except ValueError:
        pass
    assert parseargs(['x'], [], 1, 2) == ({}, ['x'])
    assert parseargs(['x', 'y'], [], 1, 2) == ({}, ['x', 'y'])
    try:
        parseargs(['x', 'y', 'z'], [], 1, 2)
    except ValueError:
        pass
    try:
        parseargs(['--a', '2.0'], [('a', 3, '')])
    except ValueError:
        pass
    try:
        parseargs(['--a', 'z'], [('a', 2.1, '')])
    except ValueError:
        pass
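# Example (illustrative sketch, not part of the upstream file): the typed
# defaults drive the coercion above -- an int default makes --max_uploads
# parse as a long, and positional arguments pass through untouched.  The
# option name here is a hypothetical stand-in.
def _example_parseargs():
    config, args = parseargs(['--max_uploads', '7', 'file.torrent'],
        [('max_uploads', 4, 'max simultaneous uploads')], 1, 1)
    assert config == {'max_uploads': 7} and args == ['file.torrent']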
PenguinTV-4.2.0/penguintv/ptvbittorrent/StorageWrapper.py0000644000000000000000000004120510646750246020562 0ustar
# Written by Bram Cohen
# see LICENSE.txt for license information

from sha import sha
from threading import Event
from bitfield import Bitfield

def dummy_status(fractionDone = None, activity = None):
    pass

def dummy_data_flunked(size):
    pass

class StorageWrapper:
    def __init__(self, storage, request_size, hashes, piece_size,
            finished, failed, statusfunc = dummy_status, flag = Event(),
            check_hashes = True, data_flunked = dummy_data_flunked):
        self.storage = storage
        self.request_size = request_size
        self.hashes = hashes
        self.piece_size = piece_size
        self.data_flunked = data_flunked
        self.total_length = storage.get_total_length()
        self.amount_left = self.total_length
        if self.total_length <= piece_size * (len(hashes) - 1):
            raise ValueError, 'bad data from tracker - total too small'
        if self.total_length > piece_size * len(hashes):
            raise ValueError, 'bad data from tracker - total too big'
        self.finished = finished
        self.failed = failed
        self.numactive = [0] * len(hashes)
        self.inactive_requests = [1] * len(hashes)
        self.amount_inactive = self.total_length
        self.endgame = False
        self.have = Bitfield(len(hashes))
        self.waschecked = [check_hashes] * len(hashes)
        self.places = {}
        self.holes = []
        if len(hashes) == 0:
            finished()
            return
        targets = {}
        total = len(hashes)
        for i in xrange(len(hashes)):
            if not self._waspre(i):
                targets.setdefault(hashes[i], []).append(i)
                total -= 1
        numchecked = 0.0
        if total and check_hashes:
            statusfunc({"activity" : 'checking existing file',
                "fractionDone" : 0})
        def markgot(piece, pos, self = self, check_hashes = check_hashes):
            self.places[piece] = pos
            self.have[piece] = True
            self.amount_left -= self._piecelen(piece)
            self.amount_inactive -= self._piecelen(piece)
            self.inactive_requests[piece] = None
            self.waschecked[piece] = check_hashes
        lastlen = self._piecelen(len(hashes) - 1)
        for i in xrange(len(hashes)):
            if not self._waspre(i):
                self.holes.append(i)
            elif not check_hashes:
                markgot(i, i)
            else:
                sh = sha(self.storage.read(piece_size * i, lastlen))
                sp = sh.digest()
                sh.update(self.storage.read(piece_size * i + lastlen,
                    self._piecelen(i) - lastlen))
                s = sh.digest()
                if s == hashes[i]:
                    markgot(i, i)
                elif targets.get(s) and self._piecelen(i) == self._piecelen(targets[s][-1]):
                    markgot(targets[s].pop(), i)
                elif not self.have[len(hashes) - 1] and sp == hashes[-1] and (
                        i == len(hashes) - 1 or not self._waspre(len(hashes) - 1)):
                    markgot(len(hashes) - 1, i)
                else:
                    self.places[i] = i
                if flag.isSet():
                    return
                numchecked += 1
                statusfunc({'fractionDone': 1 - float(self.amount_left) /
                    self.total_length})
        if self.amount_left == 0:
            finished()

    def _waspre(self, piece):
        return self.storage.was_preallocated(piece * self.piece_size,
            self._piecelen(piece))

    def _piecelen(self, piece):
        if piece < len(self.hashes) - 1:
            return self.piece_size
        else:
            return self.total_length - piece * self.piece_size

    def get_amount_left(self):
        return self.amount_left

    def do_I_have_anything(self):
        return self.amount_left < self.total_length

    def _make_inactive(self, index):
        length = min(self.piece_size, self.total_length - self.piece_size * index)
        l = []
        x = 0
        while x + self.request_size < length:
            l.append((x, self.request_size))
            x += self.request_size
        l.append((x, length - x))
        self.inactive_requests[index] = l

    def is_endgame(self):
        return self.endgame

    def get_have_list(self):
        return self.have.tostring()

    def do_I_have(self, index):
        return self.have[index]

    def do_I_have_requests(self, index):
        return not not self.inactive_requests[index]

    def new_request(self, index):
        # returns (begin, length)
        if self.inactive_requests[index] == 1:
            self._make_inactive(index)
        self.numactive[index] += 1
        rs = self.inactive_requests[index]
        r = min(rs)
        rs.remove(r)
        self.amount_inactive -= r[1]
        if self.amount_inactive == 0:
            self.endgame = True
        return r

    def piece_came_in(self, index, begin, piece):
        try:
            return self._piece_came_in(index, begin, piece)
        except IOError, e:
            self.failed('IO Error ' + str(e))
            return True

    def _piece_came_in(self, index, begin, piece):
        if not self.places.has_key(index):
            n = self.holes.pop(0)
            if self.places.has_key(n):
                oldpos = self.places[n]
                old = self.storage.read(self.piece_size * oldpos, self._piecelen(n))
                if self.have[n] and sha(old).digest() != self.hashes[n]:
                    self.failed('data corrupted on disk - maybe you have two copies running?')
                    return True
                self.storage.write(self.piece_size * n, old)
                self.places[n] = n
                if index == oldpos or index in self.holes:
                    self.places[index] = oldpos
                else:
                    for p, v in self.places.items():
                        if v == index:
                            break
                    self.places[index] = index
                    self.places[p] = oldpos
                    old = self.storage.read(self.piece_size * index, self.piece_size)
                    self.storage.write(self.piece_size * oldpos, old)
            elif index in self.holes or index == n:
                if not self._waspre(n):
                    self.storage.write(self.piece_size * n, self._piecelen(n) * chr(0xFF))
                self.places[index] = n
            else:
                for p, v in self.places.items():
                    if v == index:
                        break
                self.places[index] = index
                self.places[p] = n
                old = self.storage.read(self.piece_size * index, self._piecelen(n))
                self.storage.write(self.piece_size * n, old)
        self.storage.write(self.places[index] * self.piece_size + begin, piece)
        self.numactive[index] -= 1
        if not self.inactive_requests[index] and not self.numactive[index]:
            if sha(self.storage.read(self.piece_size * self.places[index],
                    self._piecelen(index))).digest() == self.hashes[index]:
                self.have[index] = True
                self.inactive_requests[index] = None
                self.waschecked[index] = True
                self.amount_left -= self._piecelen(index)
                if self.amount_left == 0:
                    self.finished()
            else:
                self.data_flunked(self._piecelen(index))
                self.inactive_requests[index] = 1
                self.amount_inactive += self._piecelen(index)
                return False
        return True

    def request_lost(self, index, begin, length):
        self.inactive_requests[index].append((begin, length))
        self.amount_inactive += length
        self.numactive[index] -= 1

    def get_piece(self, index, begin, length):
        try:
            return self._get_piece(index, begin, length)
        except IOError, e:
            self.failed('IO Error ' + str(e))
            return None

    def _get_piece(self, index, begin, length):
        if not self.have[index]:
            return None
        if not self.waschecked[index]:
            if sha(self.storage.read(self.piece_size * self.places[index],
                    self._piecelen(index))).digest() != self.hashes[index]:
                self.failed('told file complete on start-up, but piece failed hash check')
                return None
            self.waschecked[index] = True
        if begin + length > self._piecelen(index):
            return None
        return self.storage.read(self.piece_size * self.places[index] + begin, length)

class DummyStorage:
    def __init__(self, total, pre = False, ranges = []):
        self.pre = pre
        self.ranges = ranges
        self.s = chr(0xFF) * total
        self.done = False

    def was_preexisting(self):
        return self.pre

    def was_preallocated(self, begin, length):
        for b, l in self.ranges:
            if begin >= b and begin + length <= b + l:
                return True
        return False

    def get_total_length(self):
        return len(self.s)

    def read(self, begin, length):
        return self.s[begin:begin + length]

    def write(self, begin, piece):
        self.s = self.s[:begin] + piece + self.s[begin + len(piece):]

    def finished(self):
        self.done = True

def test_basic():
    ds = DummyStorage(3)
    sw = StorageWrapper(ds, 2, [sha('abc').digest()], 4, ds.finished, None)
    assert sw.get_amount_left() == 3
    assert not sw.do_I_have_anything()
    assert sw.get_have_list() == chr(0)
    assert sw.do_I_have_requests(0)
    x = []
    x.append(sw.new_request(0))
    assert sw.do_I_have_requests(0)
    x.append(sw.new_request(0))
    assert not sw.do_I_have_requests(0)
    x.sort()
    assert x == [(0, 2), (2, 1)]
    sw.request_lost(0, 2, 1)
    del x[-1]
    assert sw.do_I_have_requests(0)
    x.append(sw.new_request(0))
    assert x == [(0, 2), (2, 1)]
    assert not sw.do_I_have_requests(0)
    sw.piece_came_in(0, 0, 'ab')
    assert not sw.do_I_have_requests(0)
    assert sw.get_amount_left() == 3
    assert not sw.do_I_have_anything()
    assert sw.get_have_list() == chr(0)
    assert not ds.done
    sw.piece_came_in(0, 2, 'c')
    assert not sw.do_I_have_requests(0)
    assert sw.get_amount_left() == 0
    assert sw.do_I_have_anything()
    assert sw.get_have_list() == chr(0x80)
    assert sw.get_piece(0, 0, 3) == 'abc'
    assert sw.get_piece(0, 1, 2) == 'bc'
    assert sw.get_piece(0, 0, 2) == 'ab'
    assert sw.get_piece(0, 1, 1) == 'b'
    assert ds.done

def test_two_pieces():
    ds = DummyStorage(4)
    sw = StorageWrapper(ds, 3, [sha('abc').digest(), sha('d').digest()],
        3, ds.finished, None)
    assert sw.get_amount_left() == 4
    assert not sw.do_I_have_anything()
    assert sw.get_have_list() == chr(0)
    assert sw.do_I_have_requests(0)
    assert sw.do_I_have_requests(1)
    assert sw.new_request(0) == (0, 3)
    assert sw.get_amount_left() == 4
    assert not sw.do_I_have_anything()
    assert sw.get_have_list() == chr(0)
    assert not sw.do_I_have_requests(0)
    assert sw.do_I_have_requests(1)
    assert sw.new_request(1) == (0, 1)
    assert sw.get_amount_left() == 4
    assert not sw.do_I_have_anything()
    assert sw.get_have_list() == chr(0)
    assert not sw.do_I_have_requests(0)
    assert not sw.do_I_have_requests(1)
    sw.piece_came_in(0, 0, 'abc')
    assert sw.get_amount_left() == 1
    assert sw.do_I_have_anything()
    assert sw.get_have_list() == chr(0x80)
    assert not sw.do_I_have_requests(0)
    assert not sw.do_I_have_requests(1)
    assert sw.get_piece(0, 0, 3) == 'abc'
    assert not ds.done
    sw.piece_came_in(1, 0, 'd')
    assert ds.done
    assert sw.get_amount_left() == 0
    assert sw.do_I_have_anything()
    assert sw.get_have_list() == chr(0xC0)
    assert not sw.do_I_have_requests(0)
    assert not sw.do_I_have_requests(1)
    assert sw.get_piece(1, 0, 1) == 'd'

def test_hash_fail():
    ds = DummyStorage(4)
    sw = StorageWrapper(ds, 4, [sha('abcd').digest()], 4, ds.finished, None)
    assert sw.get_amount_left() == 4
    assert not sw.do_I_have_anything()
    assert sw.get_have_list() == chr(0)
    assert sw.do_I_have_requests(0)
    assert sw.new_request(0) == (0, 4)
    sw.piece_came_in(0, 0, 'abcx')
    assert sw.get_amount_left() == 4
    assert not sw.do_I_have_anything()
    assert sw.get_have_list() == chr(0)
    assert sw.do_I_have_requests(0)
    assert sw.new_request(0) == (0, 4)
    assert not ds.done
    sw.piece_came_in(0, 0, 'abcd')
    assert ds.done
    assert sw.get_amount_left() == 0
    assert sw.do_I_have_anything()
    assert sw.get_have_list() == chr(0x80)
    assert not sw.do_I_have_requests(0)

def test_lazy_hashing():
    ds = DummyStorage(4, ranges = [(0, 4)])
    flag = Event()
    sw = StorageWrapper(ds, 4, [sha('abcd').digest()], 4, ds.finished,
        lambda x, flag = flag: flag.set(), check_hashes = False)
    assert sw.get_piece(0, 0, 2) is None
    assert flag.isSet()

def test_lazy_hashing_pass():
    ds = DummyStorage(4)
    flag = Event()
    sw = StorageWrapper(ds, 4, [sha(chr(0xFF) * 4).digest()], 4, ds.finished,
        lambda x, flag = flag: flag.set(), check_hashes = False)
    assert sw.get_piece(0, 0, 2) is None
    assert not flag.isSet()

def test_preexisting():
    ds = DummyStorage(4, True, [(0, 4)])
    sw = StorageWrapper(ds, 2, [sha(chr(0xFF) * 2).digest(),
        sha('ab').digest()], 2, ds.finished, None)
    assert sw.get_amount_left() == 2
    assert sw.do_I_have_anything()
    assert sw.get_have_list() == chr(0x80)
    assert not sw.do_I_have_requests(0)
    assert sw.do_I_have_requests(1)
    assert sw.new_request(1) == (0, 2)
    assert not ds.done
    sw.piece_came_in(1, 0, 'ab')
    assert ds.done
    assert sw.get_amount_left() == 0
    assert sw.do_I_have_anything()
    assert sw.get_have_list() == chr(0xC0)
    assert not sw.do_I_have_requests(0)
    assert not sw.do_I_have_requests(1)

def test_total_too_short():
    ds = DummyStorage(4)
    try:
        StorageWrapper(ds, 4, [sha(chr(0xff) * 4).digest(),
            sha(chr(0xFF) * 4).digest()], 4, ds.finished, None)
        raise 'fail'
    except ValueError:
        pass

def test_total_too_big():
    ds = DummyStorage(9)
    try:
        sw = StorageWrapper(ds, 4, [sha('qqqq').digest(),
            sha(chr(0xFF) * 4).digest()], 4, ds.finished, None)
        raise 'fail'
    except ValueError:
        pass

def test_end_above_total_length():
    ds = DummyStorage(3, True)
    sw = StorageWrapper(ds, 4, [sha('qqq').digest()], 4, ds.finished, None)
    assert sw.get_piece(0, 0, 4) == None

def test_end_past_piece_end():
    ds = DummyStorage(4, True, ranges = [(0, 4)])
    sw = StorageWrapper(ds, 4, [sha(chr(0xFF) * 2).digest(),
        sha(chr(0xFF) * 2).digest()], 2, ds.finished, None)
    assert ds.done
    assert sw.get_piece(0, 0, 3) == None

from random import shuffle

def test_alloc_random():
    ds = DummyStorage(101)
    sw = StorageWrapper(ds, 1, [sha(chr(i)).digest() for i in xrange(101)],
        1, ds.finished, None)
    for i in xrange(100):
        assert sw.new_request(i) == (0, 1)
    r = range(100)
    shuffle(r)
    for i in r:
        sw.piece_came_in(i, 0, chr(i))
    for i in xrange(100):
        assert sw.get_piece(i, 0, 1) == chr(i)
    assert ds.s[:100] == ''.join([chr(i) for i in xrange(100)])

def test_alloc_resume():
    ds = DummyStorage(101)
    sw = StorageWrapper(ds, 1, [sha(chr(i)).digest() for i in xrange(101)],
        1, ds.finished, None)
    for i in xrange(100):
        assert sw.new_request(i) == (0, 1)
    r = range(100)
    shuffle(r)
    for i in r[:50]:
        sw.piece_came_in(i, 0, chr(i))
    assert ds.s[50:] == chr(0xFF) * 51
    ds.ranges = [(0, 50)]
    sw = StorageWrapper(ds, 1, [sha(chr(i)).digest() for i in xrange(101)],
        1, ds.finished, None)
    for i in r[50:]:
        sw.piece_came_in(i, 0, chr(i))
    assert ds.s[:100] == ''.join([chr(i) for i in xrange(100)])

def test_last_piece_pre():
    ds = DummyStorage(3, ranges = [(2, 1)])
    ds.s = chr(0xFF) + chr(0xFF) + 'c'
    sw = StorageWrapper(ds, 2, [sha('ab').digest(), sha('c').digest()],
        2, ds.finished, None)
    assert not sw.do_I_have_requests(1)
    assert sw.do_I_have_requests(0)

def test_not_last_pre():
    ds = DummyStorage(3, ranges = [(1, 1)])
    ds.s = chr(0xFF) + 'a' + chr(0xFF)
    sw = StorageWrapper(ds, 1, [sha('a').digest()] * 3, 1, ds.finished, None)
    assert not sw.do_I_have_requests(1)
    assert sw.do_I_have_requests(0)
    assert sw.do_I_have_requests(2)

def test_last_piece_not_pre():
    ds = DummyStorage(51, ranges = [(50, 1)])
    sw = StorageWrapper(ds, 2, [sha('aa').digest()] * 25 + [sha('b').digest()],
        2, ds.finished, None)
    for i in xrange(25):
        assert sw.new_request(i) == (0, 2)
    assert sw.new_request(25) == (0, 1)
    sw.piece_came_in(25, 0, 'b')
    r = range(25)
    shuffle(r)
    for i in r:
        sw.piece_came_in(i, 0, 'aa')
    assert ds.done
    assert ds.s == 'a' * 50 + 'b'
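# Example (illustrative sketch, not part of the upstream file): the
# request/verify cycle in miniature -- new_request() hands out (begin, length)
# slices of a piece, and the piece is only marked "have" once the SHA1 of the
# fully assembled piece matches the expected hash.
def _example_request_cycle():
    ds = DummyStorage(3)
    sw = StorageWrapper(ds, 2, [sha('abc').digest()], 4, ds.finished, None)
    while sw.do_I_have_requests(0):
        begin, length = sw.new_request(0)
        sw.piece_came_in(0, begin, 'abc'[begin:begin + length])
    assert ds.done and sw.get_piece(0, 0, 3) == 'abc'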
PenguinTV-4.2.0/penguintv/ptvbittorrent/btcompletedir.py0000755000000000000000000000214410646750246020444 0ustar
#!/usr/bin/python

# Written by Bram Cohen
# see LICENSE.txt for license information

from os.path import join, split
from threading import Event
from traceback import print_exc
from sys import argv
from BitTorrent.btmakemetafile import calcsize, make_meta_file, ignore

def dummy(x):
    pass

def completedir(files, url, flag = Event(), vc = dummy, fc = dummy,
        piece_len_pow2 = None):
    files.sort()
    ext = '.torrent'
    togen = []
    for f in files:
        if f[-len(ext):] != ext:
            togen.append(f)
    total = 0
    for i in togen:
        total += calcsize(i)
    subtotal = [0]
    def callback(x, subtotal = subtotal, total = total, vc = vc):
        subtotal[0] += x
        vc(float(subtotal[0]) / total)
    for i in togen:
        t = split(i)
        if t[1] == '':
            i = t[0]
        fc(i)
        try:
            make_meta_file(i, url, flag = flag, progress = callback,
                progress_percent = 0, piece_len_exp = piece_len_pow2)
        except ValueError:
            print_exc()

def dc(v):
    print v

if __name__ == '__main__':
    completedir(argv[2:], argv[1], fc = dc)
PenguinTV-4.2.0/penguintv/ptvbittorrent/Storage.py0000644000000000000000000001164010646750246017221 0ustar
# Written by Bram Cohen
# see LICENSE.txt for license information

from sha import sha
from bisect import bisect_right

class Storage:
    def __init__(self, files, open, exists, getsize):
        # can raise IOError and ValueError
        self.ranges = []
        total = 0L
        so_far = 0L
        for file, length in files:
            if length != 0:
                self.ranges.append((total, total + length, file))
                total += length
                if exists(file):
                    l = getsize(file)
                    if l > length:
                        l = length
                    so_far += l
            elif not exists(file):
                open(file, 'wb').close()
        self.begins = [i[0] for i in self.ranges]
        self.total_length = total
        self.handles = {}
        self.whandles = {}
        self.tops = {}
        for file, length in files:
            if exists(file):
                l = getsize(file)
                if l != length:
                    self.handles[file] = open(file, 'rb+')
                    self.whandles[file] = 1
                    if l > length:
                        self.handles[file].truncate(length)
                else:
                    self.handles[file] = open(file, 'rb')
                self.tops[file] = l
            else:
                self.handles[file] = open(file, 'wb+')
                self.whandles[file] = 1

    def was_preallocated(self, pos, length):
        for file, begin, end in self._intervals(pos, length):
            if self.tops.get(file, 0) < end:
                return False
        return True

    def set_readonly(self):
        # may raise IOError or OSError
        for file in self.whandles.keys():
            old = self.handles[file]
            old.flush()
            old.close()
            self.handles[file] = open(file, 'rb')

    def get_total_length(self):
        return self.total_length

    def _intervals(self, pos, amount):
        r = []
        stop = pos + amount
        p = bisect_right(self.begins, pos) - 1
        while p < len(self.ranges) and self.ranges[p][0] < stop:
            begin, end, file = self.ranges[p]
            r.append((file, max(pos, begin) - begin, min(end, stop) - begin))
            p += 1
        return r

    def read(self, pos, amount):
        r = []
        for file, pos, end in self._intervals(pos, amount):
            h = self.handles[file]
            h.seek(pos)
            r.append(h.read(end - pos))
        return ''.join(r)

    def write(self, pos, s):
        # might raise an IOError
        total = 0
        for file, begin, end in self._intervals(pos, len(s)):
            if not self.whandles.has_key(file):
                self.handles[file].close()
                self.handles[file] = open(file, 'rb+')
                self.whandles[file] = 1
            h = self.handles[file]
            h.seek(begin)
            h.write(s[total: total + end - begin])
            total += end - begin

    def close(self):
        for h in self.handles.values():
            h.close()

def lrange(a, b, c):
    r = []
    while a < b:
        r.append(a)
        a += c
    return r

# everything below is for testing

from fakeopen import FakeOpen

def test_Storage_simple():
    f = FakeOpen()
    m = Storage([('a', 5)], f.open, f.exists, f.getsize)
    assert f.files.keys() == ['a']
    m.write(0, 'abc')
    assert m.read(0, 3) == 'abc'
    m.write(2, 'abc')
    assert m.read(2, 3) == 'abc'
    m.write(1, 'abc')
    assert m.read(0, 5) == 'aabcc'

def test_Storage_multiple():
    f = FakeOpen()
    m = Storage([('a', 5), ('2', 4), ('c', 3)],
        f.open, f.exists, f.getsize)
    x = f.files.keys()
    x.sort()
    assert x == ['2', 'a', 'c']
    m.write(3, 'abc')
    assert m.read(3, 3) == 'abc'
    m.write(5, 'ab')
    assert m.read(4, 3) == 'bab'
    m.write(3, 'pqrstuvw')
    assert m.read(3, 8) == 'pqrstuvw'
    m.write(3, 'abcdef')
    assert m.read(3, 7) == 'abcdefv'

def test_Storage_zero():
    f = FakeOpen()
    Storage([('a', 0)], f.open, f.exists, f.getsize)
    assert f.files == {'a': []}

def test_resume_zero():
    f = FakeOpen({'a': ''})
    Storage([('a', 0)], f.open, f.exists, f.getsize)
    assert f.files == {'a': []}

def test_Storage_with_zero():
    f = FakeOpen()
    m = Storage([('a', 3), ('b', 0), ('c', 3)], f.open, f.exists, f.getsize)
    m.write(2, 'abc')
    assert m.read(2, 3) == 'abc'
    x = f.files.keys()
    x.sort()
    assert x == ['a', 'b', 'c']
    assert len(f.files['a']) == 3
    assert len(f.files['b']) == 0

def test_Storage_resume():
    f = FakeOpen({'a': 'abc'})
    m = Storage([('a', 4)], f.open, f.exists, f.getsize)
    assert f.files.keys() == ['a']
    assert m.read(0, 3) == 'abc'

def test_Storage_mixed_resume():
    f = FakeOpen({'b': 'abc'})
    m = Storage([('a', 3), ('b', 4)], f.open, f.exists, f.getsize)
    x = f.files.keys()
    x.sort()
    assert x == ['a', 'b']
    assert m.read(3, 3) == 'abc'
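# Example (illustrative sketch, not part of the upstream file): a single
# logical byte range spanning two backing files -- _intervals() splits the
# write at the file boundary, as the multiple-file test above exercises.
def _example_spanning_write():
    f = FakeOpen()
    m = Storage([('a', 2), ('b', 2)], f.open, f.exists, f.getsize)
    m.write(1, 'xy')            # crosses from file 'a' into file 'b'
    assert m.read(1, 2) == 'xy'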
PenguinTV-4.2.0/penguintv/ptvbittorrent/bencode.py0000644000000000000000000001561410646750246017215 0ustar
# Written by Petru Paler
# see LICENSE.txt for license information

def decode_int(x, f):
    f += 1
    newf = x.index('e', f)
    try:
        n = int(x[f:newf])
    except (OverflowError, ValueError):
        n = long(x[f:newf])
    if x[f] == '-':
        if x[f + 1] == '0':
            raise ValueError
    elif x[f] == '0' and newf != f+1:
        raise ValueError
    return (n, newf+1)

def decode_string(x, f):
    colon = x.index(':', f)
    try:
        n = int(x[f:colon])
    except (OverflowError, ValueError):
        n = long(x[f:colon])
    if x[f] == '0' and colon != f+1:
        raise ValueError
    colon += 1
    return (x[colon:colon+n], colon+n)

def decode_list(x, f):
    r, f = [], f+1
    while x[f] != 'e':
        v, f = decode_func[x[f]](x, f)
        r.append(v)
    return (r, f + 1)

def decode_dict(x, f):
    r, f = {}, f+1
    lastkey = None
    while x[f] != 'e':
        k, f = decode_string(x, f)
        if lastkey >= k:
            raise ValueError
        lastkey = k
        r[k], f = decode_func[x[f]](x, f)
    return (r, f + 1)

decode_func = {}
decode_func['l'] = decode_list
decode_func['d'] = decode_dict
decode_func['i'] = decode_int
decode_func['0'] = decode_string
decode_func['1'] = decode_string
decode_func['2'] = decode_string
decode_func['3'] = decode_string
decode_func['4'] = decode_string
decode_func['5'] = decode_string
decode_func['6'] = decode_string
decode_func['7'] = decode_string
decode_func['8'] = decode_string
decode_func['9'] = decode_string

def bdecode(x):
    try:
        r, l = decode_func[x[0]](x, 0)
    except (IndexError, KeyError):
        raise ValueError
    if l != len(x):
        raise ValueError
    return r

def test_bdecode():
    try:
        bdecode('0:0:')
        assert 0
    except ValueError:
        pass
    try:
        bdecode('ie')
        assert 0
    except ValueError:
        pass
    try:
        bdecode('i341foo382e')
        assert 0
    except ValueError:
        pass
    assert bdecode('i4e') == 4L
    assert bdecode('i0e') == 0L
    assert bdecode('i123456789e') == 123456789L
    assert bdecode('i-10e') == -10L
    try:
        bdecode('i-0e')
        assert 0
    except ValueError:
        pass
    try:
        bdecode('i123')
        assert 0
    except ValueError:
        pass
    try:
        bdecode('')
        assert 0
    except ValueError:
        pass
    try:
        bdecode('i6easd')
        assert 0
    except ValueError:
        pass
    try:
        bdecode('35208734823ljdahflajhdf')
        assert 0
    except ValueError:
        pass
    try:
        bdecode('2:abfdjslhfld')
        assert 0
    except ValueError:
        pass
    assert bdecode('0:') == ''
    assert bdecode('3:abc') == 'abc'
    assert bdecode('10:1234567890') == '1234567890'
    try:
        bdecode('02:xy')
        assert 0
    except ValueError:
        pass
    try:
        bdecode('l')
        assert 0
    except ValueError:
        pass
    assert bdecode('le') == []
    try:
        bdecode('leanfdldjfh')
        assert 0
    except ValueError:
        pass
    assert bdecode('l0:0:0:e') == ['', '', '']
    try:
        bdecode('relwjhrlewjh')
        assert 0
    except ValueError:
        pass
    assert bdecode('li1ei2ei3ee') == [1, 2, 3]
    assert bdecode('l3:asd2:xye') == ['asd', 'xy']
    assert bdecode('ll5:Alice3:Bobeli2ei3eee') == [['Alice', 'Bob'], [2, 3]]
    try:
        bdecode('d')
        assert 0
    except ValueError:
        pass
    try:
        bdecode('defoobar')
        assert 0
    except ValueError:
        pass
    assert bdecode('de') == {}
    assert bdecode('d3:agei25e4:eyes4:bluee') == {'age': 25, 'eyes': 'blue'}
    assert bdecode('d8:spam.mp3d6:author5:Alice6:lengthi100000eee') == \
        {'spam.mp3': {'author': 'Alice', 'length': 100000}}
    try:
        bdecode('d3:fooe')
        assert 0
    except ValueError:
        pass
    try:
        bdecode('di1e0:e')
        assert 0
    except ValueError:
        pass
    try:
        bdecode('d1:b0:1:a0:e')
        assert 0
    except ValueError:
        pass
    try:
        bdecode('d1:a0:1:a0:e')
        assert 0
    except ValueError:
        pass
    try:
        bdecode('i03e')
        assert 0
    except ValueError:
        pass
    try:
        bdecode('l01:ae')
        assert 0
    except ValueError:
        pass
    try:
        bdecode('9999:x')
        assert 0
    except ValueError:
        pass
    try:
        bdecode('l0:')
        assert 0
    except ValueError:
        pass
    try:
        bdecode('d0:0:')
        assert 0
    except ValueError:
        pass
    try:
        bdecode('d0:')
        assert 0
    except ValueError:
        pass
    try:
        bdecode('00:')
        assert 0
    except ValueError:
        pass
    try:
        bdecode('l-3:e')
        assert 0
    except ValueError:
        pass
    try:
        bdecode('i-03e')
        assert 0
    except ValueError:
        pass
    bdecode('d0:i3ee')

from types import StringType, IntType, LongType, DictType, ListType, TupleType

class Bencached(object):
    __slots__ = ['bencoded']

    def __init__(self, s):
        self.bencoded = s

def encode_bencached(x, r):
    r.append(x.bencoded)

def encode_int(x, r):
    r.extend(('i', str(x), 'e'))

def encode_string(x, r):
    r.extend((str(len(x)), ':', x))

def encode_list(x, r):
    r.append('l')
    for i in x:
        encode_func[type(i)](i, r)
    r.append('e')

def encode_dict(x, r):
    r.append('d')
    ilist = x.items()
    ilist.sort()
    for k, v in ilist:
        r.extend((str(len(k)), ':', k))
        encode_func[type(v)](v, r)
    r.append('e')

encode_func = {}
encode_func[type(Bencached(0))] = encode_bencached
encode_func[IntType] = encode_int
encode_func[LongType] = encode_int
encode_func[StringType] = encode_string
encode_func[ListType] = encode_list
encode_func[TupleType] = encode_list
encode_func[DictType] = encode_dict

try:
    from types import BooleanType
    encode_func[BooleanType] = encode_int
except ImportError:
    pass

def bencode(x):
    r = []
    encode_func[type(x)](x, r)
    return ''.join(r)

def test_bencode():
    assert bencode(4) == 'i4e'
    assert bencode(0) == 'i0e'
    assert bencode(-10) == 'i-10e'
    assert bencode(12345678901234567890L) == 'i12345678901234567890e'
    assert bencode('') == '0:'
    assert bencode('abc') == '3:abc'
    assert bencode('1234567890') == '10:1234567890'
    assert bencode([]) == 'le'
    assert bencode([1, 2, 3]) == 'li1ei2ei3ee'
    assert bencode([['Alice', 'Bob'], [2, 3]]) == 'll5:Alice3:Bobeli2ei3eee'
    assert bencode({}) == 'de'
    assert bencode({'age': 25, 'eyes': 'blue'}) == 'd3:agei25e4:eyes4:bluee'
    assert bencode({'spam.mp3': {'author': 'Alice', 'length': 100000}}) == \
        'd8:spam.mp3d6:author5:Alice6:lengthi100000eee'
    assert bencode(Bencached(bencode(3))) == 'i3e'
    try:
        bencode({1: 'foo'})
    except TypeError:
        return
    assert 0

try:
    import psyco
    psyco.bind(bdecode)
    psyco.bind(bencode)
except ImportError:
    pass
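# Example (illustrative sketch, not part of the upstream file): bencoding
# round-trips the kind of nested dictionary a .torrent file holds; dictionary
# keys are serialized in sorted order, so the encoding is canonical.  The
# tracker URL and file name are hypothetical stand-ins.
def _example_roundtrip():
    meta = {'announce': 'http://tracker.example/announce',
        'info': {'length': 4, 'name': 'demo', 'piece length': 262144}}
    assert bdecode(bencode(meta)) == meta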
PenguinTV-4.2.0/penguintv/ptvbittorrent/Uploader.py0000644000000000000000000001757310646750246017377 0ustar
# Written by Bram Cohen
# see LICENSE.txt for license information

from CurrentRateMeasure import Measure

class Upload:
    def __init__(self, connection, choker, storage,
            max_slice_length, max_rate_period, fudge):
        self.connection = connection
        self.choker = choker
        self.storage = storage
        self.max_slice_length = max_slice_length
        self.max_rate_period = max_rate_period
        self.choked = True
        self.interested = False
        self.buffer = []
        self.measure = Measure(max_rate_period, fudge)
        if storage.do_I_have_anything():
            connection.send_bitfield(storage.get_have_list())

    def got_not_interested(self):
        if self.interested:
            self.interested = False
            del self.buffer[:]
            self.choker.not_interested(self.connection)

    def got_interested(self):
        if not self.interested:
            self.interested = True
            self.choker.interested(self.connection)

    def flushed(self):
        while len(self.buffer) > 0 and self.connection.is_flushed():
            index, begin, length = self.buffer[0]
            del self.buffer[0]
            piece = self.storage.get_piece(index, begin, length)
            if piece is None:
                self.connection.close()
                return
            self.measure.update_rate(len(piece))
            self.connection.send_piece(index, begin, piece)

    def got_request(self, index, begin, length):
        if not self.interested or length > self.max_slice_length:
            self.connection.close()
            return
        if not self.choked:
            self.buffer.append((index, begin, length))
            self.flushed()

    def got_cancel(self, index, begin, length):
        try:
            self.buffer.remove((index, begin, length))
        except ValueError:
            pass

    def choke(self):
        if not self.choked:
            self.choked = True
            del self.buffer[:]
            self.connection.send_choke()

    def unchoke(self):
        if self.choked:
            self.choked = False
            self.connection.send_unchoke()

    def is_choked(self):
        return self.choked

    def is_interested(self):
        return self.interested

    def has_queries(self):
        return len(self.buffer) > 0

    def get_rate(self):
        return self.measure.get_rate()

class DummyConnection:
    def __init__(self, events):
        self.events = events
        self.flushed = False

    def send_bitfield(self, bitfield):
        self.events.append(('bitfield', bitfield))

    def is_flushed(self):
        return self.flushed

    def close(self):
        self.events.append('closed')

    def send_piece(self, index, begin, piece):
        self.events.append(('piece', index, begin, piece))

    def send_choke(self):
        self.events.append('choke')

    def send_unchoke(self):
        self.events.append('unchoke')

class DummyChoker:
    def __init__(self, events):
        self.events = events

    def interested(self, connection):
        self.events.append('interested')

    def not_interested(self, connection):
        self.events.append('not interested')

class DummyStorage:
    def __init__(self, events):
        self.events = events

    def do_I_have_anything(self):
        self.events.append('do I have')
        return True

    def get_have_list(self):
        self.events.append('get have list')
        return [False, True]

    def get_piece(self, index, begin, length):
        self.events.append(('get piece', index, begin, length))
        if length == 4:
            return None
        return 'a' * length

def test_skip_over_choke():
    events = []
    dco = DummyConnection(events)
    dch = DummyChoker(events)
    ds = DummyStorage(events)
    u = Upload(dco, dch, ds, 100, 20, 5)
    assert u.is_choked()
    assert not u.is_interested()
    u.got_interested()
    assert u.is_interested()
    u.got_request(0, 0, 3)
    dco.flushed = True
    u.flushed()
    assert events == ['do I have', 'get have list',
        ('bitfield', [False, True]), 'interested']

def test_bad_piece():
    events = []
    dco = DummyConnection(events)
    dch = DummyChoker(events)
    ds = DummyStorage(events)
    u = Upload(dco, dch, ds, 100, 20, 5)
    assert u.is_choked()
    assert not u.is_interested()
    u.got_interested()
    assert u.is_interested()
    u.unchoke()
    assert not u.is_choked()
    u.got_request(0, 0, 4)
    dco.flushed = True
    u.flushed()
    assert events == ['do I have', 'get have list',
        ('bitfield', [False, True]), 'interested', 'unchoke',
        ('get piece', 0, 0, 4), 'closed']

def test_still_rejected_after_unchoke():
    events = []
    dco = DummyConnection(events)
    dch = DummyChoker(events)
    ds = DummyStorage(events)
    u = Upload(dco, dch, ds, 100, 20, 5)
    assert u.is_choked()
    assert not u.is_interested()
    u.got_interested()
    assert u.is_interested()
    u.unchoke()
    assert not u.is_choked()
    u.got_request(0, 0, 3)
    u.choke()
    u.unchoke()
    dco.flushed = True
    u.flushed()
    assert events == ['do I have', 'get have list',
        ('bitfield', [False, True]), 'interested', 'unchoke',
        'choke', 'unchoke']

def test_sends_when_flushed():
    events = []
    dco = DummyConnection(events)
    dch = DummyChoker(events)
    ds = DummyStorage(events)
    u = Upload(dco, dch, ds, 100, 20, 5)
    u.unchoke()
    u.got_interested()
    u.got_request(0, 1, 3)
    dco.flushed = True
    u.flushed()
    u.flushed()
    assert events == ['do I have', 'get have list',
        ('bitfield', [False, True]), 'unchoke', 'interested',
        ('get piece', 0, 1, 3), ('piece', 0, 1, 'aaa')]

def test_sends_immediately():
    events = []
    dco = DummyConnection(events)
    dch = DummyChoker(events)
    ds = DummyStorage(events)
    u = Upload(dco, dch, ds, 100, 20, 5)
    u.unchoke()
    u.got_interested()
    dco.flushed = True
    u.got_request(0, 1, 3)
    assert events == ['do I have', 'get have list',
        ('bitfield', [False, True]), 'unchoke', 'interested',
        ('get piece', 0, 1, 3), ('piece', 0, 1, 'aaa')]

def test_cancel():
    events = []
    dco = DummyConnection(events)
    dch = DummyChoker(events)
    ds = DummyStorage(events)
    u = Upload(dco, dch, ds, 100, 20, 5)
    u.unchoke()
    u.got_interested()
    u.got_request(0, 1, 3)
    u.got_cancel(0, 1, 3)
    u.got_cancel(0, 1, 2)
    u.flushed()
    dco.flushed = True
    assert events == ['do I have', 'get have list',
        ('bitfield', [False, True]), 'unchoke', 'interested']

def test_clears_on_not_interested():
    events = []
    dco = DummyConnection(events)
    dch = DummyChoker(events)
    ds = DummyStorage(events)
    u = Upload(dco, dch, ds, 100, 20, 5)
    u.unchoke()
    u.got_interested()
    u.got_request(0, 1, 3)
    u.got_not_interested()
    dco.flushed = True
    u.flushed()
    assert events == ['do I have', 'get have list',
        ('bitfield', [False, True]), 'unchoke', 'interested',
        'not interested']

def test_close_when_sends_on_not_interested():
    events = []
    dco = DummyConnection(events)
    dch = DummyChoker(events)
    ds = DummyStorage(events)
    u = Upload(dco, dch, ds, 100, 20, 5)
    u.got_request(0, 1, 3)
    assert events == ['do I have', 'get have list',
        ('bitfield', [False, True]), 'closed']

def test_close_over_max_length():
    events = []
    dco = DummyConnection(events)
    dch = DummyChoker(events)
    ds = DummyStorage(events)
    u = Upload(dco, dch, ds, 100, 20, 5)
    u.got_interested()
    u.got_request(0, 1, 101)
    assert events == ['do I have', 'get have list',
        ('bitfield', [False, True]), 'interested', 'closed']

def test_no_bitfield_on_start_empty():
    events = []
    dco = DummyConnection(events)
    dch = DummyChoker(events)
    ds = DummyStorage(events)
    ds.do_I_have_anything = lambda: False
    u = Upload(dco, dch, ds, 100, 20, 5)
    assert events == []
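# Example (illustrative sketch, not part of the upstream file): requests
# queue while the transport is backed up and drain on flushed(), as the
# "sends when flushed" test above shows step by step.
def _example_deferred_send():
    events = []
    dco = DummyConnection(events)
    dch = DummyChoker(events)
    ds = DummyStorage(events)
    u = Upload(dco, dch, ds, 100, 20, 5)
    u.unchoke()
    u.got_interested()
    u.got_request(0, 1, 3)      # transport not flushed: queued, not sent
    assert u.has_queries()
    dco.flushed = True
    u.flushed()                 # now the piece goes out
    assert events[-1] == ('piece', 0, 1, 'aaa')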
PenguinTV-4.2.0/penguintv/ptvbittorrent/btmakemetafile.py0000755000000000000000000001074710646750246020565 0ustar
#!/usr/bin/python

# Written by Bram Cohen
# see LICENSE.txt for license information

from sys import argv
from os.path import getsize, split, join, abspath, isdir
from os import listdir
from sha import sha
from copy import copy
from string import strip
from BitTorrent.bencode import bencode
from BitTorrent.btformats import check_info
from BitTorrent.parseargs import parseargs, formatDefinitions
from threading import Event
from time import time

defaults = [
    ('piece_size_pow2', 18, "which power of 2 to set the piece size to"),
    ('comment', '', "optional human-readable comment to put in .torrent"),
    ('target', '', "optional target file for the torrent")
    ]

ignore = ['core', 'CVS']

def dummy(v):
    pass

def make_meta_file(file, url, piece_len_exp, flag = Event(), progress = dummy,
        progress_percent = 1, comment = None, target = None):
    if piece_len_exp == None:
        piece_len_exp = 18
    piece_length = 2 ** piece_len_exp
    a, b = split(file)
    if not target:
        if b == '':
            f = a + '.torrent'
        else:
            f = join(a, b + '.torrent')
    else:
        f = target
    info = makeinfo(file, piece_length, flag, progress, progress_percent)
    if flag.isSet():
        return
    check_info(info)
    h = open(f, 'wb')
    data = {'info': info, 'announce': strip(url),
        'creation date': long(time())}
    if comment:
        data['comment'] = comment
    h.write(bencode(data))
    h.close()

def calcsize(file):
    if not isdir(file):
        return getsize(file)
    total = 0
    for s in subfiles(abspath(file)):
        total += getsize(s[1])
    return total

def makeinfo(file, piece_length, flag, progress, progress_percent = 1):
    file = abspath(file)
    if isdir(file):
        subs = subfiles(file)
        subs.sort()
        pieces = []
        sh = sha()
        done = 0
        fs = []
        totalsize = 0.0
        totalhashed = 0
        for p, f in subs:
            totalsize += getsize(f)
        for p, f in subs:
            pos = 0
            size = getsize(f)
            fs.append({'length': size, 'path': p})
            h = open(f, 'rb')
            while pos < size:
                a = min(size - pos, piece_length - done)
                sh.update(h.read(a))
                if flag.isSet():
                    return
                done += a
                pos += a
                totalhashed += a
                if done == piece_length:
                    pieces.append(sh.digest())
                    done = 0
                    sh = sha()
                if progress_percent:
                    progress(totalhashed / totalsize)
                else:
                    progress(a)
            h.close()
        if done > 0:
            pieces.append(sh.digest())
        return {'pieces': ''.join(pieces),
            'piece length': piece_length, 'files': fs,
            'name': split(file)[1]}
    else:
        size = getsize(file)
        pieces = []
        p = 0
        h = open(file, 'rb')
        while p < size:
            x = h.read(min(piece_length, size - p))
            if flag.isSet():
                return
            pieces.append(sha(x).digest())
            p += piece_length
            if p > size:
                p = size
            if progress_percent:
                progress(float(p) / size)
            else:
                progress(min(piece_length, size - p))
        h.close()
        return {'pieces': ''.join(pieces),
            'piece length': piece_length, 'length': size,
            'name': split(file)[1]}

def subfiles(d):
    r = []
    stack = [([], d)]
    while len(stack) > 0:
        p, n = stack.pop()
        if isdir(n):
            for s in listdir(n):
                if s not in ignore and s[:1] != '.':
                    stack.append((copy(p) + [s], join(n, s)))
        else:
            r.append((p, n))
    return r

def prog(amount):
    print '%.1f%% complete\r' % (amount * 100),

if __name__ == '__main__':
    if len(argv) < 3:
        print 'usage is -'
        print argv[0] + ' file trackerurl [params]'
        print
        print formatDefinitions(defaults, 80)
    else:
        try:
            config, args = parseargs(argv[3:], defaults, 0, 0)
            make_meta_file(argv[1], argv[2], config['piece_size_pow2'],
                progress = prog, comment = config['comment'],
                target = config['target'])
        except ValueError, e:
            print 'error: ' + str(e)
            print 'run with no args for parameter explanations'
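# Example (illustrative sketch, not part of the upstream file): generating a
# .torrent next to a payload file.  The tracker URL, file name, and comment
# are hypothetical stand-ins, and the payload file must already exist.
def _example_make_torrent():
    make_meta_file('payload.bin', 'http://tracker.example/announce',
        18, comment = 'demo torrent')
    # writes payload.bin.torrent alongside the payload, with 2**18-byte pieces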
print print formatDefinitions(defaults, 80) else: try: config, args = parseargs(argv[3:], defaults, 0, 0) make_meta_file(argv[1], argv[2], config['piece_size_pow2'], progress = prog, comment = config['comment'], target = config['target']) except ValueError, e: print 'error: ' + str(e) print 'run with no args for parameter explanations' PenguinTV-4.2.0/penguintv/ptvbittorrent/Choker.py0000644000000000000000000002334310646750246017033 0ustar # Written by Bram Cohen # see LICENSE.txt for license information from random import randrange class Choker: def __init__(self, max_uploads, schedule, done = lambda: False, min_uploads = None): self.max_uploads = max_uploads if min_uploads is None: min_uploads = max_uploads self.min_uploads = min_uploads self.schedule = schedule self.connections = [] self.count = 0 self.done = done schedule(self._round_robin, 10) def _round_robin(self): self.schedule(self._round_robin, 10) self.count += 1 if self.count % 3 == 0: for i in xrange(len(self.connections)): u = self.connections[i].get_upload() if u.is_choked() and u.is_interested(): self.connections = self.connections[i:] + self.connections[:i] break self._rechoke() def _snubbed(self, c): if self.done(): return False return c.get_download().is_snubbed() def _rate(self, c): if self.done(): return c.get_upload().get_rate() else: return c.get_download().get_rate() def _rechoke(self): preferred = [] for c in self.connections: if not self._snubbed(c) and c.get_upload().is_interested(): preferred.append((-self._rate(c), c)) preferred.sort() del preferred[self.max_uploads - 1:] preferred = [x[1] for x in preferred] count = len(preferred) hit = False for c in self.connections: u = c.get_upload() if c in preferred: u.unchoke() else: if count < self.min_uploads or not hit: u.unchoke() if u.is_interested(): count += 1 hit = True else: u.choke() def connection_made(self, connection, p = None): if p is None: p = randrange(-2, len(self.connections) + 1) self.connections.insert(max(p, 0), connection) self._rechoke() def connection_lost(self, connection): self.connections.remove(connection) if connection.get_upload().is_interested() and not connection.get_upload().is_choked(): self._rechoke() def interested(self, connection): if not connection.get_upload().is_choked(): self._rechoke() def not_interested(self, connection): if not connection.get_upload().is_choked(): self._rechoke() def change_max_uploads(self, newval): def foo(self=self, newval=newval): self._change_max_uploads(newval) self.schedule(foo, 0); def _change_max_uploads(self, newval): self.max_uploads = newval self._rechoke() class DummyScheduler: def __init__(self): self.s = [] def __call__(self, func, delay): self.s.append((func, delay)) class DummyConnection: def __init__(self, v = 0): self.u = DummyUploader() self.d = DummyDownloader(self) self.v = v def get_upload(self): return self.u def get_download(self): return self.d class DummyDownloader: def __init__(self, c): self.s = False self.c = c def is_snubbed(self): return self.s def get_rate(self): return self.c.v class DummyUploader: def __init__(self): self.i = False self.c = True def choke(self): if not self.c: self.c = True def unchoke(self): if self.c: self.c = False def is_choked(self): return self.c def is_interested(self): return self.i def test_round_robin_with_no_downloads(): s = DummyScheduler() Choker(2, s) assert len(s.s) == 1 assert s.s[0][1] == 10 s.s[0][0]() del s.s[0] assert len(s.s) == 1 assert s.s[0][1] == 10 s.s[0][0]() del s.s[0] s.s[0][0]() del s.s[0] s.s[0][0]() del s.s[0] def 
test_resort(): s = DummyScheduler() choker = Choker(1, s) c1 = DummyConnection() c2 = DummyConnection(1) c3 = DummyConnection(2) c4 = DummyConnection(3) c2.u.i = True c3.u.i = True choker.connection_made(c1) assert not c1.u.c choker.connection_made(c2, 1) assert not c1.u.c assert not c2.u.c choker.connection_made(c3, 1) assert not c1.u.c assert c2.u.c assert not c3.u.c c2.v = 2 c3.v = 1 choker.connection_made(c4, 1) assert not c1.u.c assert c2.u.c assert not c3.u.c assert not c4.u.c choker.connection_lost(c4) assert not c1.u.c assert c2.u.c assert not c3.u.c s.s[0][0]() assert not c1.u.c assert c2.u.c assert not c3.u.c def test_interest(): s = DummyScheduler() choker = Choker(1, s) c1 = DummyConnection() c2 = DummyConnection(1) c3 = DummyConnection(2) c2.u.i = True c3.u.i = True choker.connection_made(c1) assert not c1.u.c choker.connection_made(c2, 1) assert not c1.u.c assert not c2.u.c choker.connection_made(c3, 1) assert not c1.u.c assert c2.u.c assert not c3.u.c c3.u.i = False choker.not_interested(c3) assert not c1.u.c assert not c2.u.c assert not c3.u.c c3.u.i = True choker.interested(c3) assert not c1.u.c assert c2.u.c assert not c3.u.c choker.connection_lost(c3) assert not c1.u.c assert not c2.u.c def test_robin_interest(): s = DummyScheduler() choker = Choker(1, s) c1 = DummyConnection(0) c2 = DummyConnection(1) c1.u.i = True choker.connection_made(c2) assert not c2.u.c choker.connection_made(c1, 0) assert not c1.u.c assert c2.u.c c1.u.i = False choker.not_interested(c1) assert not c1.u.c assert not c2.u.c c1.u.i = True choker.interested(c1) assert not c1.u.c assert c2.u.c choker.connection_lost(c1) assert not c2.u.c def test_skip_not_interested(): s = DummyScheduler() choker = Choker(1, s) c1 = DummyConnection(0) c2 = DummyConnection(1) c3 = DummyConnection(2) c1.u.i = True c3.u.i = True choker.connection_made(c2) assert not c2.u.c choker.connection_made(c1, 0) assert not c1.u.c assert c2.u.c choker.connection_made(c3, 2) assert not c1.u.c assert c2.u.c assert c3.u.c f = s.s[0][0] f() assert not c1.u.c assert c2.u.c assert c3.u.c f() assert not c1.u.c assert c2.u.c assert c3.u.c f() assert c1.u.c assert c2.u.c assert not c3.u.c def test_connection_lost_no_interrupt(): s = DummyScheduler() choker = Choker(1, s) c1 = DummyConnection(0) c2 = DummyConnection(1) c3 = DummyConnection(2) c1.u.i = True c2.u.i = True c3.u.i = True choker.connection_made(c1) choker.connection_made(c2, 1) choker.connection_made(c3, 2) f = s.s[0][0] f() assert not c1.u.c assert c2.u.c assert c3.u.c f() assert not c1.u.c assert c2.u.c assert c3.u.c f() assert c1.u.c assert not c2.u.c assert c3.u.c f() assert c1.u.c assert not c2.u.c assert c3.u.c f() assert c1.u.c assert not c2.u.c assert c3.u.c choker.connection_lost(c3) assert c1.u.c assert not c2.u.c f() assert not c1.u.c assert c2.u.c choker.connection_lost(c2) assert not c1.u.c def test_connection_made_no_interrupt(): s = DummyScheduler() choker = Choker(1, s) c1 = DummyConnection(0) c2 = DummyConnection(1) c3 = DummyConnection(2) c1.u.i = True c2.u.i = True c3.u.i = True choker.connection_made(c1) choker.connection_made(c2, 1) f = s.s[0][0] assert not c1.u.c assert c2.u.c f() assert not c1.u.c assert c2.u.c f() assert not c1.u.c assert c2.u.c choker.connection_made(c3, 1) assert not c1.u.c assert c2.u.c assert c3.u.c f() assert c1.u.c assert c2.u.c assert not c3.u.c def test_round_robin(): s = DummyScheduler() choker = Choker(1, s) c1 = DummyConnection(0) c2 = DummyConnection(1) c1.u.i = True c2.u.i = True choker.connection_made(c1) 
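# With max_uploads=1 the choker may unchoke only one connection at a time;
# the f() ticks below drive Choker._round_robin, which on every third call
# rotates the connection list so the single unchoke slot moves along.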
choker.connection_made(c2, 1) f = s.s[0][0] assert not c1.u.c assert c2.u.c f() assert not c1.u.c assert c2.u.c f() assert not c1.u.c assert c2.u.c f() assert c1.u.c assert not c2.u.c f() assert c1.u.c assert not c2.u.c f() assert c1.u.c assert not c2.u.c f() assert not c1.u.c assert c2.u.c def test_multi(): s = DummyScheduler() choker = Choker(4, s) c1 = DummyConnection(0) c2 = DummyConnection(0) c3 = DummyConnection(0) c4 = DummyConnection(8) c5 = DummyConnection(0) c6 = DummyConnection(0) c7 = DummyConnection(6) c8 = DummyConnection(0) c9 = DummyConnection(9) c10 = DummyConnection(7) c11 = DummyConnection(10) choker.connection_made(c1, 0) choker.connection_made(c2, 1) choker.connection_made(c3, 2) choker.connection_made(c4, 3) choker.connection_made(c5, 4) choker.connection_made(c6, 5) choker.connection_made(c7, 6) choker.connection_made(c8, 7) choker.connection_made(c9, 8) choker.connection_made(c10, 9) choker.connection_made(c11, 10) c2.u.i = True c4.u.i = True c6.u.i = True c8.u.i = True c10.u.i = True c2.d.s = True c6.d.s = True c8.d.s = True s.s[0][0]() assert not c1.u.c assert not c2.u.c assert not c3.u.c assert not c4.u.c assert not c5.u.c assert not c6.u.c assert c7.u.c assert c8.u.c assert c9.u.c assert not c10.u.c assert c11.u.c PenguinTV-4.2.0/penguintv/ptvbittorrent/testtest.py0000644000000000000000000000400410646750246017470 0ustar """ A much simpler testing framework than PyUnit tests a module by running all functions in it whose name starts with 'test' a test fails if it raises an exception, otherwise it passes functions are try_all and try_single """ # Written by Bram Cohen # see LICENSE.txt for license information from traceback import print_exc from sys import modules def try_all(excludes = [], excluded_paths=[]): """ tests all imported modules takes an optional list of module names and/or module objects to skip over. modules from files under any of excluded_paths are also skipped. """ failed = [] for modulename, module in modules.items(): # skip builtins if not hasattr(module, '__file__'): continue # skip modules under any of excluded_paths if [p for p in excluded_paths if module.__file__.startswith(p)]: continue if modulename not in excludes and module not in excludes: try_module(module, modulename, failed) print_failed(failed) def try_single(m): """ tests a single module accepts either a module object or a module name in string form """ if type(m) is str: modulename = m module = __import__(m) else: modulename = str(m) module = m failed = [] try_module(module, modulename, failed) print_failed(failed) def try_module(module, modulename, failed): if not hasattr(module, '__dict__'): return for n, func in module.__dict__.items(): if not callable(func) or n[:4] != 'test': continue name = modulename + '.'
+ n try: print 'trying ' + name func() print 'passed ' + name except: print_exc() failed.append(name) print 'failed ' + name def print_failed(failed): print if len(failed) == 0: print 'everything passed' else: print 'the following tests failed:' for i in failed: print i PenguinTV-4.2.0/penguintv/ptvbittorrent/__init__.py0000644000000000000000000000002210646750246017344 0ustar version = '3.4.2' PenguinTV-4.2.0/penguintv/ptvbittorrent/download.py0000644000000000000000000003104610646750246017426 0ustar # Written by Bram Cohen # see LICENSE.txt for license information #edited by Owen Williams to support more args from zurllib import urlopen from urlparse import urljoin from btformats import check_message from Choker import Choker from Storage import Storage from StorageWrapper import StorageWrapper from Uploader import Upload from Downloader import Downloader from Connecter import Connecter from Encrypter import Encoder from RawServer import RawServer from Rerequester import Rerequester from DownloaderFeedback import DownloaderFeedback from RateMeasure import RateMeasure from CurrentRateMeasure import Measure from PiecePicker import PiecePicker from bencode import bencode, bdecode from __init__ import version from binascii import b2a_hex from sha import sha from os import path, makedirs from parseargs import parseargs, formatDefinitions from socket import error as socketerror from random import seed from threading import Thread, Event from time import time try: from os import getpid except ImportError: def getpid(): return 1 defaults = [ ('max_uploads', 7, "the maximum number of uploads to allow at once."), ('keepalive_interval', 120.0, 'number of seconds to pause between sending keepalives'), ('download_slice_size', 2 ** 14, "How many bytes to query for per request."), ('request_backlog', 5, "how many requests to keep in a single pipe at once."), ('max_message_length', 2 ** 23, "maximum length prefix encoding you'll accept over the wire - larger values get the connection dropped."), ('ip', '', "ip to report you have to the tracker."), ('minport', 6881, 'minimum port to listen on, counts up if unavailable'), ('maxport', 6999, 'maximum port to listen on'), ('responsefile', '', 'file the server response was stored in, alternative to url'), ('url', '', 'url to get file from, alternative to responsefile'), ('saveas', '', 'local file name to save the file as, null indicates query user'), ('timeout', 300.0, 'time to wait between closing sockets which nothing has been received on'), ('timeout_check_interval', 60.0, 'time to wait between checking if any connections have timed out'), ('max_slice_length', 2 ** 17, "maximum length slice to send to peers, larger requests are ignored"), ('max_rate_period', 20.0, "maximum amount of time to guess the current rate estimate represents"), ('bind', '', 'ip to bind to locally'), ('upload_rate_fudge', 5.0, 'time equivalent of writing to kernel-level TCP buffer, for rate adjustment'), ('display_interval', .5, 'time between updates of displayed information'), ('rerequest_interval', 5 * 60, 'time to wait between requesting more peers'), ('min_peers', 20, 'minimum number of peers to not do rerequesting'), ('http_timeout', 60, 'number of seconds to wait before assuming that an http connection has timed out'), ('max_initiate', 35, 'number of peers at which to stop initiating new connections'), ('max_allow_in', 55, 'maximum number of connections to allow, after this new incoming connections will be immediately closed'), ('check_hashes', 1, 'whether to check hashes on 
disk'), ('max_upload_rate', 0, 'maximum kB/s to upload at, 0 means no limit'), ('snub_time', 30.0, "seconds to wait for data to come in over a connection before assuming it's semi-permanently choked"), ('spew', 0, "whether to display diagnostic info to stdout"), ('rarest_first_cutoff', 4, "number of downloads at which to switch from random to rarest first"), ('min_uploads', 4, "the number of uploads to fill out to with extra optimistic unchokes"), ('report_hash_failures', 0, "whether to inform the user that hash failures occur. They're non-fatal."), ] def download(params, filefunc, statusfunc, finfunc, errorfunc, doneflag, cols, pathFunc = None, paramfunc = None, spewflag = Event()): if len(params) == 0: errorfunc('arguments are -\n' + formatDefinitions(defaults, cols)) return try: config, args = parseargs(params, defaults) #OWEN if args: if config.get('responsefile', None) == None: raise ValueError, 'must have responsefile as arg or parameter, not both' if path.isfile(args[0]): config['responsefile'] = args[0] else: config['url'] = args[0] if (config['responsefile'] == '') == (config['url'] == ''): raise ValueError, 'need responsefile or url' except ValueError, e: errorfunc('error: ' + str(e) + '\nrun with no args for parameter explanations') return try: if config['responsefile'] != '': h = open(config['responsefile'], 'rb') else: h = urlopen(config['url']) response = h.read() h.close() except IOError, e: if config['responsefile'] != '' and config['responsefile'].find('Temporary Internet Files') != -1: errorfunc('BitTorrent was passed a filename that doesn\'t exist. ' + 'Either clear your Temporary Internet Files or right-click the link ' + 'and save the .torrent to disk first.') else: errorfunc('problem getting response info - ' + str(e)) return try: response = bdecode(response) check_message(response) except ValueError, e: errorfunc("got bad file info - " + str(e)) return try: def make(f, forcedir = False): if not forcedir: f = path.split(f)[0] if f != '' and not path.exists(f): makedirs(f) info = response['info'] if info.has_key('length'): file_length = info['length'] file = filefunc(info['name'], file_length, config['saveas'], False) if file is None: return make(file) files = [(file, file_length)] else: file_length = 0 for x in info['files']: file_length += x['length'] file = filefunc(info['name'], file_length, config['saveas'], True) if file is None: return # if this path exists, and no files from the info dict exist, we assume it's a new download and # the user wants to create a new directory with the default name existing = 0 if path.exists(file): for x in info['files']: if path.exists(path.join(file, x['path'][0])): existing = 1 if not existing: file = path.join(file, info['name']) make(file, True) # alert the UI to any possible change in path if pathFunc != None: pathFunc(file) files = [] for x in info['files']: n = file for i in x['path']: n = path.join(n, i) files.append((n, x['length'])) make(n) except OSError, e: errorfunc("Couldn't allocate dir - " + str(e)) return finflag = Event() ann = [None] myid = 'M' + version.replace('.', '-') myid = myid + ('-' * (8 - len(myid))) + b2a_hex(sha(repr(time()) + ' ' + str(getpid())).digest()[-6:]) seed(myid) pieces = [info['pieces'][x:x+20] for x in xrange(0, len(info['pieces']), 20)] def failed(reason, errorfunc = errorfunc, doneflag = doneflag): doneflag.set() if reason is not None: errorfunc(reason) rawserver = RawServer(doneflag, config['timeout_check_interval'], config['timeout'], errorfunc = errorfunc, maxconnects = 
config['max_allow_in']) try: try: storage = Storage(files, open, path.exists, path.getsize) except IOError, e: errorfunc('trouble accessing files - ' + str(e)) return def finished(finfunc = finfunc, finflag = finflag, ann = ann, storage = storage, errorfunc = errorfunc): finflag.set() try: storage.set_readonly() except (IOError, OSError), e: errorfunc('trouble setting readonly at end - ' + str(e)) if ann[0] is not None: ann[0](1) finfunc() rm = [None] def data_flunked(amount, rm = rm, errorfunc = errorfunc, report_hash_failures = config['report_hash_failures']): if rm[0] is not None: rm[0](amount) if report_hash_failures: errorfunc('a piece failed hash check, re-downloading it') storagewrapper = StorageWrapper(storage, config['download_slice_size'], pieces, info['piece length'], finished, failed, statusfunc, doneflag, config['check_hashes'], data_flunked) except ValueError, e: failed('bad data - ' + str(e)) except IOError, e: failed('IOError - ' + str(e)) if doneflag.isSet(): return e = 'maxport less than minport - no ports to check' for listen_port in xrange(config['minport'], config['maxport'] + 1): try: rawserver.bind(listen_port, config['bind']) break except socketerror, e: pass else: errorfunc("Couldn't listen - " + str(e)) return choker = Choker(config['max_uploads'], rawserver.add_task, finflag.isSet, config['min_uploads']) upmeasure = Measure(config['max_rate_period'], config['upload_rate_fudge']) downmeasure = Measure(config['max_rate_period']) def make_upload(connection, choker = choker, storagewrapper = storagewrapper, max_slice_length = config['max_slice_length'], max_rate_period = config['max_rate_period'], fudge = config['upload_rate_fudge']): return Upload(connection, choker, storagewrapper, max_slice_length, max_rate_period, fudge) ratemeasure = RateMeasure(storagewrapper.get_amount_left()) rm[0] = ratemeasure.data_rejected picker = PiecePicker(len(pieces), config['rarest_first_cutoff']) for i in xrange(len(pieces)): if storagewrapper.do_I_have(i): picker.complete(i) downloader = Downloader(storagewrapper, picker, config['request_backlog'], config['max_rate_period'], len(pieces), downmeasure, config['snub_time'], ratemeasure.data_came_in) connecter = Connecter(make_upload, downloader, choker, len(pieces), upmeasure, config['max_upload_rate'] * 1024, rawserver.add_task) infohash = sha(bencode(info)).digest() encoder = Encoder(connecter, rawserver, myid, config['max_message_length'], rawserver.add_task, config['keepalive_interval'], infohash, config['max_initiate']) rerequest = Rerequester(response['announce'], config['rerequest_interval'], rawserver.add_task, connecter.how_many_connections, config['min_peers'], encoder.start_connection, rawserver.add_task, storagewrapper.get_amount_left, upmeasure.get_total, downmeasure.get_total, listen_port, config['ip'], myid, infohash, config['http_timeout'], errorfunc, config['max_initiate'], doneflag, upmeasure.get_rate, downmeasure.get_rate, encoder.ever_got_incoming) if config['spew']: spewflag.set() DownloaderFeedback(choker, rawserver.add_task, statusfunc, upmeasure.get_rate, downmeasure.get_rate, upmeasure.get_total, downmeasure.get_total, ratemeasure.get_time_left, ratemeasure.get_size_left, file_length, finflag, config['display_interval'], spewflag) # useful info and functions for the UI if paramfunc: paramfunc({ 'max_upload_rate' : connecter.change_max_upload_rate, # change_max_upload_rate() 'max_uploads': choker.change_max_uploads, # change_max_uploads() 'listen_port' : listen_port, # int 'peer_id' : myid, # string 'info_hash' 
: infohash, # string 'start_connection' : encoder._start_connection # start_connection((, ), ) }) statusfunc({"activity" : 'connecting to peers'}) ann[0] = rerequest.announce rerequest.begin() rawserver.listen_forever(encoder) storage.close() rerequest.announce(2) PenguinTV-4.2.0/penguintv/ptvbittorrent/fakeopen.py0000644000000000000000000000430010646750246017400 0ustar # Written by Bram Cohen # see LICENSE.txt for license information from string import join class FakeHandle: def __init__(self, name, fakeopen): self.name = name self.fakeopen = fakeopen self.pos = 0 def flush(self): pass def close(self): pass def seek(self, pos): self.pos = pos def read(self, amount = None): old = self.pos f = self.fakeopen.files[self.name] if self.pos >= len(f): return '' if amount is None: self.pos = len(f) return join(f[old:], '') else: self.pos = min(len(f), old + amount) return join(f[old:self.pos], '') def write(self, s): f = self.fakeopen.files[self.name] while len(f) < self.pos: f.append(chr(0)) self.fakeopen.files[self.name][self.pos : self.pos + len(s)] = list(s) self.pos += len(s) class FakeOpen: def __init__(self, initial = {}): self.files = {} for key, value in initial.items(): self.files[key] = list(value) def open(self, filename, mode): """currently treats everything as rw - doesn't support append""" self.files.setdefault(filename, []) return FakeHandle(filename, self) def exists(self, file): return self.files.has_key(file) def getsize(self, file): return len(self.files[file]) def test_normal(): f = FakeOpen({'f1': 'abcde'}) assert f.exists('f1') assert not f.exists('f2') assert f.getsize('f1') == 5 h = f.open('f1', 'rw') assert h.read(3) == 'abc' assert h.read(1) == 'd' assert h.read() == 'e' assert h.read(2) == '' h.write('fpq') h.seek(4) assert h.read(2) == 'ef' h.write('ghij') h.seek(0) assert h.read() == 'abcdefghij' h.seek(2) h.write('p') h.write('q') assert h.read(1) == 'e' h.seek(1) assert h.read(5) == 'bpqef' h2 = f.open('f2', 'rw') assert h2.read() == '' h2.write('mnop') h2.seek(1) assert h2.read() == 'nop' assert f.exists('f1') assert f.exists('f2') assert f.getsize('f1') == 10 assert f.getsize('f2') == 4 PenguinTV-4.2.0/penguintv/ptvbittorrent/Connecter.py0000644000000000000000000002506210646750246017540 0ustar # Written by Bram Cohen # see LICENSE.txt for license information from bitfield import Bitfield from binascii import b2a_hex from CurrentRateMeasure import Measure def toint(s): return long(b2a_hex(s), 16) def tobinary(i): return (chr(i >> 24) + chr((i >> 16) & 0xFF) + chr((i >> 8) & 0xFF) + chr(i & 0xFF)) CHOKE = chr(0) UNCHOKE = chr(1) INTERESTED = chr(2) NOT_INTERESTED = chr(3) # index HAVE = chr(4) # index, bitfield BITFIELD = chr(5) # index, begin, length REQUEST = chr(6) # index, begin, piece PIECE = chr(7) # index, begin, piece CANCEL = chr(8) class Connection: def __init__(self, connection, connecter): self.connection = connection self.connecter = connecter self.got_anything = False def get_ip(self): return self.connection.get_ip() def get_id(self): return self.connection.get_id() def close(self): self.connection.close() def is_flushed(self): if self.connecter.rate_capped: return False return self.connection.is_flushed() def is_locally_initiated(self): return self.connection.is_locally_initiated() def send_interested(self): self.connection.send_message(INTERESTED) def send_not_interested(self): self.connection.send_message(NOT_INTERESTED) def send_choke(self): self.connection.send_message(CHOKE) def send_unchoke(self): self.connection.send_message(UNCHOKE) def 
send_request(self, index, begin, length): self.connection.send_message(REQUEST + tobinary(index) + tobinary(begin) + tobinary(length)) def send_cancel(self, index, begin, length): self.connection.send_message(CANCEL + tobinary(index) + tobinary(begin) + tobinary(length)) def send_piece(self, index, begin, piece): assert not self.connecter.rate_capped self.connecter._update_upload_rate(len(piece)) self.connection.send_message(PIECE + tobinary(index) + tobinary(begin) + piece) def send_bitfield(self, bitfield): self.connection.send_message(BITFIELD + bitfield) def send_have(self, index): self.connection.send_message(HAVE + tobinary(index)) def get_upload(self): return self.upload def get_download(self): return self.download class Connecter: def __init__(self, make_upload, downloader, choker, numpieces, totalup, max_upload_rate = 0, sched = None): self.downloader = downloader self.make_upload = make_upload self.choker = choker self.numpieces = numpieces self.max_upload_rate = max_upload_rate self.sched = sched self.totalup = totalup self.rate_capped = False self.connections = {} def _update_upload_rate(self, amount): self.totalup.update_rate(amount) if self.max_upload_rate > 0 and self.totalup.get_rate_noupdate() > self.max_upload_rate: self.rate_capped = True self.sched(self._uncap, self.totalup.time_until_rate(self.max_upload_rate)) def _uncap(self): self.rate_capped = False while not self.rate_capped: up = None minrate = None for i in self.connections.values(): if not i.upload.is_choked() and i.upload.has_queries() and i.connection.is_flushed(): rate = i.upload.get_rate() if up is None or rate < minrate: up = i.upload minrate = rate if up is None: break up.flushed() if self.totalup.get_rate_noupdate() > self.max_upload_rate: break def change_max_upload_rate(self, newval): def foo(self=self, newval=newval): self._change_max_upload_rate(newval) self.sched(foo, 0); def _change_max_upload_rate(self, newval): self.max_upload_rate = newval self._uncap() def how_many_connections(self): return len(self.connections) def connection_made(self, connection): c = Connection(connection, self) self.connections[connection] = c c.upload = self.make_upload(c) c.download = self.downloader.make_download(c) self.choker.connection_made(c) def connection_lost(self, connection): c = self.connections[connection] d = c.download del self.connections[connection] d.disconnected() self.choker.connection_lost(c) def connection_flushed(self, connection): self.connections[connection].upload.flushed() def got_message(self, connection, message): c = self.connections[connection] t = message[0] if t == BITFIELD and c.got_anything: connection.close() return c.got_anything = True if (t in [CHOKE, UNCHOKE, INTERESTED, NOT_INTERESTED] and len(message) != 1): connection.close() return if t == CHOKE: c.download.got_choke() elif t == UNCHOKE: c.download.got_unchoke() elif t == INTERESTED: c.upload.got_interested() elif t == NOT_INTERESTED: c.upload.got_not_interested() elif t == HAVE: if len(message) != 5: connection.close() return i = toint(message[1:]) if i >= self.numpieces: connection.close() return c.download.got_have(i) elif t == BITFIELD: try: b = Bitfield(self.numpieces, message[1:]) except ValueError: connection.close() return c.download.got_have_bitfield(b) elif t == REQUEST: if len(message) != 13: connection.close() return i = toint(message[1:5]) if i >= self.numpieces: connection.close() return c.upload.got_request(i, toint(message[5:9]), toint(message[9:])) elif t == CANCEL: if len(message) != 13: connection.close() 
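# a CANCEL payload must be exactly 13 bytes (1-byte message type plus three
# 4-byte big-endian integers); anything else is malformed, so the
# connection is dropped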
return i = toint(message[1:5]) if i >= self.numpieces: connection.close() return c.upload.got_cancel(i, toint(message[5:9]), toint(message[9:])) elif t == PIECE: if len(message) <= 9: connection.close() return i = toint(message[1:5]) if i >= self.numpieces: connection.close() return if c.download.got_piece(i, toint(message[5:9]), message[9:]): for co in self.connections.values(): co.send_have(i) else: connection.close() class DummyUpload: def __init__(self, events): self.events = events events.append('made upload') def flushed(self): self.events.append('flushed') def got_interested(self): self.events.append('interested') def got_not_interested(self): self.events.append('not interested') def got_request(self, index, begin, length): self.events.append(('request', index, begin, length)) def got_cancel(self, index, begin, length): self.events.append(('cancel', index, begin, length)) class DummyDownload: def __init__(self, events): self.events = events events.append('made download') self.hit = 0 def disconnected(self): self.events.append('disconnected') def got_choke(self): self.events.append('choke') def got_unchoke(self): self.events.append('unchoke') def got_have(self, i): self.events.append(('have', i)) def got_have_bitfield(self, bitfield): self.events.append(('bitfield', bitfield.tostring())) def got_piece(self, index, begin, piece): self.events.append(('piece', index, begin, piece)) self.hit += 1 return self.hit > 1 class DummyDownloader: def __init__(self, events): self.events = events def make_download(self, connection): return DummyDownload(self.events) class DummyConnection: def __init__(self, events): self.events = events def send_message(self, message): self.events.append(('m', message)) class DummyChoker: def __init__(self, events, cs): self.events = events self.cs = cs def connection_made(self, c): self.events.append('made') self.cs.append(c) def connection_lost(self, c): self.events.append('lost') def test_operation(): events = [] cs = [] co = Connecter(lambda c, events = events: DummyUpload(events), DummyDownloader(events), DummyChoker(events, cs), 3, Measure(10)) assert events == [] assert cs == [] dc = DummyConnection(events) co.connection_made(dc) assert len(cs) == 1 cc = cs[0] co.got_message(dc, BITFIELD + chr(0xc0)) co.got_message(dc, CHOKE) co.got_message(dc, UNCHOKE) co.got_message(dc, INTERESTED) co.got_message(dc, NOT_INTERESTED) co.got_message(dc, HAVE + tobinary(2)) co.got_message(dc, REQUEST + tobinary(1) + tobinary(5) + tobinary(6)) co.got_message(dc, CANCEL + tobinary(2) + tobinary(3) + tobinary(4)) co.got_message(dc, PIECE + tobinary(1) + tobinary(0) + 'abc') co.got_message(dc, PIECE + tobinary(1) + tobinary(3) + 'def') co.connection_flushed(dc) cc.send_bitfield(chr(0x60)) cc.send_interested() cc.send_not_interested() cc.send_choke() cc.send_unchoke() cc.send_have(4) cc.send_request(0, 2, 1) cc.send_cancel(1, 2, 3) cc.send_piece(1, 2, 'abc') co.connection_lost(dc) x = ['made upload', 'made download', 'made', ('bitfield', chr(0xC0)), 'choke', 'unchoke', 'interested', 'not interested', ('have', 2), ('request', 1, 5, 6), ('cancel', 2, 3, 4), ('piece', 1, 0, 'abc'), ('piece', 1, 3, 'def'), ('m', HAVE + tobinary(1)), 'flushed', ('m', BITFIELD + chr(0x60)), ('m', INTERESTED), ('m', NOT_INTERESTED), ('m', CHOKE), ('m', UNCHOKE), ('m', HAVE + tobinary(4)), ('m', REQUEST + tobinary(0) + tobinary(2) + tobinary(1)), ('m', CANCEL + tobinary(1) + tobinary(2) + tobinary(3)), ('m', PIECE + tobinary(1) + tobinary(2) + 'abc'), 'disconnected', 'lost'] for a, b in zip (events, x): 
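# comparing element by element (rather than asserting events == x outright)
# makes a failure report the first mismatching pair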
assert a == b, repr((a, b)) def test_conversion(): assert toint(tobinary(50000)) == 50000 PenguinTV-4.2.0/penguintv/ptvbittorrent/CurrentRateMeasure.py0000644000000000000000000000177010646750246021374 0ustar # Written by Bram Cohen # see LICENSE.txt for license information from time import time class Measure: def __init__(self, max_rate_period, fudge = 1): self.max_rate_period = max_rate_period self.ratesince = time() - fudge self.last = self.ratesince self.rate = 0.0 self.total = 0l def update_rate(self, amount): self.total += amount t = time() self.rate = (self.rate * (self.last - self.ratesince) + amount) / (t - self.ratesince) self.last = t if self.ratesince < t - self.max_rate_period: self.ratesince = t - self.max_rate_period def get_rate(self): self.update_rate(0) return self.rate def get_rate_noupdate(self): return self.rate def time_until_rate(self, newrate): if self.rate <= newrate: return 0 t = time() - self.ratesince return ((self.rate * t) / newrate) - t def get_total(self): return self.total PenguinTV-4.2.0/penguintv/ptvbittorrent/btformats.py0000644000000000000000000000736710646750246017615 0ustar # Written by Bram Cohen # see LICENSE.txt for license information from types import StringType, LongType, IntType, ListType, DictType from re import compile reg = compile(r'^[^/\\.~][^/\\]*$') ints = (LongType, IntType) def check_info(info): if type(info) != DictType: raise ValueError, 'bad metainfo - not a dictionary' pieces = info.get('pieces') if type(pieces) != StringType or len(pieces) % 20 != 0: raise ValueError, 'bad metainfo - bad pieces key' piecelength = info.get('piece length') if type(piecelength) not in ints or piecelength <= 0: raise ValueError, 'bad metainfo - illegal piece length' name = info.get('name') if type(name) != StringType: raise ValueError, 'bad metainfo - bad name' if not reg.match(name): raise ValueError, 'name %s disallowed for security reasons' % name if info.has_key('files') == info.has_key('length'): raise ValueError, 'single/multiple file mix' if info.has_key('length'): length = info.get('length') if type(length) not in ints or length < 0: raise ValueError, 'bad metainfo - bad length' else: files = info.get('files') if type(files) != ListType: raise ValueError for f in files: if type(f) != DictType: raise ValueError, 'bad metainfo - bad file value' length = f.get('length') if type(length) not in ints or length < 0: raise ValueError, 'bad metainfo - bad length' path = f.get('path') if type(path) != ListType or path == []: raise ValueError, 'bad metainfo - bad path' for p in path: if type(p) != StringType: raise ValueError, 'bad metainfo - bad path dir' if not reg.match(p): raise ValueError, 'path %s disallowed for security reasons' % p for i in xrange(len(files)): for j in xrange(i): if files[i]['path'] == files[j]['path']: raise ValueError, 'bad metainfo - duplicate path' def check_message(message): if type(message) != DictType: raise ValueError check_info(message.get('info')) if type(message.get('announce')) != StringType: raise ValueError def check_peers(message): if type(message) != DictType: raise ValueError if message.has_key('failure reason'): if type(message['failure reason']) != StringType: raise ValueError return peers = message.get('peers') if type(peers) == ListType: for p in peers: if type(p) != DictType: raise ValueError if type(p.get('ip')) != StringType: raise ValueError port = p.get('port') if type(port) not in ints or port <= 0: raise ValueError if p.has_key('peer id'): id = p.get('peer id') if type(id) != StringType or len(id) != 20:
raise ValueError elif type(peers) != StringType or len(peers) % 6 != 0: raise ValueError interval = message.get('interval', 1) if type(interval) not in ints or interval <= 0: raise ValueError minint = message.get('min interval', 1) if type(minint) not in ints or minint <= 0: raise ValueError if type(message.get('tracker id', '')) != StringType: raise ValueError npeers = message.get('num peers', 0) if type(npeers) not in ints or npeers < 0: raise ValueError dpeers = message.get('done peers', 0) if type(dpeers) not in ints or dpeers < 0: raise ValueError last = message.get('last', 0) if type(last) not in ints or last < 0: raise ValueError PenguinTV-4.2.0/penguintv/ptvbittorrent/Rerequester.py0000644000000000000000000001302510646750246020122 0ustar # Written by Bram Cohen # see LICENSE.txt for license information from zurllib import urlopen, quote from btformats import check_peers from bencode import bdecode from threading import Thread, Lock from socket import error from time import time from random import randrange from binascii import b2a_hex class Rerequester: def __init__(self, url, interval, sched, howmany, minpeers, connect, externalsched, amount_left, up, down, port, ip, myid, infohash, timeout, errorfunc, maxpeers, doneflag, upratefunc, downratefunc, ever_got_incoming): self.url = ('%s?info_hash=%s&peer_id=%s&port=%s&key=%s' % (url, quote(infohash), quote(myid), str(port), b2a_hex(''.join([chr(randrange(256)) for i in xrange(4)])))) if ip != '': self.url += '&ip=' + quote(ip) self.interval = interval self.last = None self.trackerid = None self.announce_interval = 30 * 60 self.sched = sched self.howmany = howmany self.minpeers = minpeers self.connect = connect self.externalsched = externalsched self.amount_left = amount_left self.up = up self.down = down self.timeout = timeout self.errorfunc = errorfunc self.maxpeers = maxpeers self.doneflag = doneflag self.upratefunc = upratefunc self.downratefunc = downratefunc self.ever_got_incoming = ever_got_incoming self.last_failed = True self.last_time = 0 def c(self): self.sched(self.c, self.interval) if self.ever_got_incoming(): getmore = self.howmany() <= self.minpeers / 3 else: getmore = self.howmany() < self.minpeers if getmore or time() - self.last_time > self.announce_interval: self.announce() def begin(self): self.sched(self.c, self.interval) self.announce(0) def announce(self, event = None): self.last_time = time() s = ('%s&uploaded=%s&downloaded=%s&left=%s' % (self.url, str(self.up()), str(self.down()), str(self.amount_left()))) if self.last is not None: s += '&last=' + quote(str(self.last)) if self.trackerid is not None: s += '&trackerid=' + quote(str(self.trackerid)) if self.howmany() >= self.maxpeers: s += '&numwant=0' else: s += '&compact=1' if event != None: s += '&event=' + ['started', 'completed', 'stopped'][event] set = SetOnce().set def checkfail(self = self, set = set): if set(): if self.last_failed and self.upratefunc() < 100 and self.downratefunc() < 100: self.errorfunc('Problem connecting to tracker - timeout exceeded') self.last_failed = True self.sched(checkfail, self.timeout) Thread(target = self.rerequest, args = [s, set]).start() def rerequest(self, url, set): try: h = urlopen(url) r = h.read() h.close() if set(): def add(self = self, r = r): self.last_failed = False self.postrequest(r) self.externalsched(add, 0) except (IOError, error), e: if set(): def fail(self = self, r = 'Problem connecting to tracker - ' + str(e)): if self.last_failed: self.errorfunc(r) self.last_failed = True self.externalsched(fail, 0) def 
postrequest(self, data): try: r = bdecode(data) check_peers(r) if r.has_key('failure reason'): self.errorfunc('rejected by tracker - ' + r['failure reason']) else: if r.has_key('warning message'): self.errorfunc('warning from tracker - ' + r['warning message']) self.announce_interval = r.get('interval', self.announce_interval) self.interval = r.get('min interval', self.interval) self.trackerid = r.get('tracker id', self.trackerid) self.last = r.get('last') p = r['peers'] peers = [] if type(p) == type(''): for x in xrange(0, len(p), 6): ip = '.'.join([str(ord(i)) for i in p[x:x+4]]) port = (ord(p[x+4]) << 8) | ord(p[x+5]) peers.append((ip, port, None)) else: for x in p: peers.append((x['ip'], x['port'], x.get('peer id'))) ps = len(peers) + self.howmany() if ps < self.maxpeers: if self.doneflag.isSet(): if r.get('num peers', 1000) - r.get('done peers', 0) > ps * 1.2: self.last = None else: if r.get('num peers', 1000) > ps * 1.2: self.last = None for x in peers: self.connect((x[0], x[1]), x[2]) except ValueError, e: if data != '': self.errorfunc('bad data from tracker - ' + str(e)) class SetOnce: def __init__(self): self.lock = Lock() self.first = True def set(self): try: self.lock.acquire() r = self.first self.first = False return r finally: self.lock.release() PenguinTV-4.2.0/penguintv/ptvbittorrent/RateMeasure.py0000644000000000000000000000273710646750246020041 0ustar # Written by Bram Cohen # see LICENSE.txt for license information from time import time class RateMeasure: def __init__(self, left): self.start = None self.last = None self.rate = 0 self.remaining = None self.left = left self.broke = False self.got_anything = False def data_came_in(self, amount): if not self.got_anything: self.got_anything = True self.start = time() - 2 self.last = self.start self.left -= amount return self.update(time(), amount) def data_rejected(self, amount): self.left += amount def get_time_left(self): if not self.got_anything: return None t = time() if t - self.last > 15: self.update(t, 0) return self.remaining def get_size_left(self): return self.left def update(self, t, amount): self.left -= amount try: self.rate = ((self.rate * (self.last - self.start)) + amount) / (t - self.start) self.last = t self.remaining = self.left / self.rate if self.start < self.last - self.remaining: self.start = self.last - self.remaining except ZeroDivisionError: self.remaining = None if self.broke and self.last - self.start < 20: self.start = self.last - 20 if self.last - self.start > 20: self.broke = True PenguinTV-4.2.0/penguintv/ptvbittorrent/track.py0000644000000000000000000006006110646750246016722 0ustar # Written by Bram Cohen # see LICENSE.txt for license information from parseargs import parseargs, formatDefinitions from RawServer import RawServer from HTTPHandler import HTTPHandler from NatCheck import NatCheck from threading import Event from bencode import bencode, bdecode, Bencached from zurllib import urlopen, quote, unquote from urlparse import urlparse from os import rename from os.path import exists, isfile from cStringIO import StringIO from time import time, gmtime, strftime from random import shuffle from sha import sha from types import StringType, IntType, LongType, ListType, DictType from binascii import b2a_hex, a2b_hex, a2b_base64 import sys from __init__ import version defaults = [ ('port', 80, "Port to listen on."), ('dfile', None, 'file to store recent downloader info in'), ('bind', '', 'ip to bind to locally'), ('socket_timeout', 15, 'timeout for closing connections'), ('save_dfile_interval', 5 
* 60, 'seconds between saving dfile'), ('timeout_downloaders_interval', 45 * 60, 'seconds between expiring downloaders'), ('reannounce_interval', 30 * 60, 'seconds downloaders should wait between reannouncements'), ('response_size', 50, 'number of peers to send in an info message'), ('timeout_check_interval', 5, 'time to wait between checking if any connections have timed out'), ('nat_check', 3, "how many times to check if a downloader is behind a NAT (0 = don't check)"), ('min_time_between_log_flushes', 3.0, 'minimum time it must have been since the last flush to do another one'), ('allowed_dir', '', 'only allow downloads for .torrents in this dir'), ('parse_allowed_interval', 15, 'minutes between reloading of allowed_dir'), ('show_names', 1, 'whether to display names from allowed dir'), ('favicon', '', 'file containing x-icon data to return when browser requests favicon.ico'), ('only_local_override_ip', 1, "ignore the ip GET parameter from machines which aren't on local network IPs"), ('logfile', '', 'file to write the tracker logs, use - for stdout (default)'), ('allow_get', 0, 'use with allowed_dir; adds a /file?hash={hash} url that allows users to download the torrent file'), ('keep_dead', 0, 'keep dead torrents after they expire (so they still show up on your /scrape and web page)'), ('max_give', 200, 'maximum number of peers to give with any one request'), ] def statefiletemplate(x): if type(x) != DictType: raise ValueError for cname, cinfo in x.items(): if cname == 'peers': for y in cinfo.values(): # The 'peers' key is a dictionary of SHA hashes (torrent ids) if type(y) != DictType: # ... for the active torrents, and each is a dictionary raise ValueError for id, info in y.items(): # ... of client ids interested in that torrent if (len(id) != 20): raise ValueError if type(info) != DictType: # ... each of which is also a dictionary raise ValueError # ... which has an IP, a Port, and a Bytes Left count for that client for that torrent if type(info.get('ip', '')) != StringType: raise ValueError port = info.get('port') if type(port) not in (IntType, LongType) or port < 0: raise ValueError left = info.get('left') if type(left) not in (IntType, LongType) or left < 0: raise ValueError elif cname == 'completed': if (type(cinfo) != DictType): # The 'completed' key is a dictionary of SHA hashes (torrent ids) raise ValueError # ... for keeping track of the total completions per torrent for y in cinfo.values(): # ... each torrent has an integer value if type(y) not in (IntType, LongType): # ... for the number of reported completions for that torrent raise ValueError def parseTorrents(dir): import os a = {} for f in os.listdir(dir): if f[-8:] == '.torrent': try: p = os.path.join(dir,f) d = bdecode(open(p, 'rb').read()) h = sha(bencode(d['info'])).digest() i = d['info'] a[h] = {} a[h]['name'] = i.get('name', f) a[h]['file'] = f a[h]['path'] = p l = 0 if i.has_key('length'): l = i.get('length',0) elif i.has_key('files'): for li in i['files']: if li.has_key('length'): l = l + li['length'] a[h]['length'] = l except: # what now, boss? 
print "Error parsing " + f, sys.exc_info()[0] return a alas = 'your file may exist elsewhere in the universe\nbut alas, not here\n' def isotime(secs = None): if secs == None: secs = time() return strftime('%Y-%m-%d %H:%M UTC', gmtime(secs)) def compact_peer_info(ip, port): return ''.join([chr(int(i)) for i in ip.split('.')]) + chr((port & 0xFF00) >> 8) + chr(port & 0xFF) class Tracker: def __init__(self, config, rawserver): self.response_size = config['response_size'] self.dfile = config['dfile'] self.natcheck = config['nat_check'] self.max_give = config['max_give'] self.reannounce_interval = config['reannounce_interval'] self.save_dfile_interval = config['save_dfile_interval'] self.show_names = config['show_names'] self.only_local_override_ip = config['only_local_override_ip'] favicon = config['favicon'] self.favicon = None if favicon: if isfile(favicon): h = open(favicon, 'rb') self.favicon = h.read() h.close() else: print "**warning** specified favicon file -- %s -- does not exist." % favicon self.rawserver = rawserver self.becache1 = {} self.becache2 = {} self.cache1 = {} self.cache2 = {} self.times = {} if exists(self.dfile): h = open(self.dfile, 'rb') ds = h.read() h.close() tempstate = bdecode(ds) else: tempstate = {} if tempstate.has_key('peers'): self.state = tempstate else: self.state = {} self.state['peers'] = tempstate self.downloads = self.state.setdefault('peers', {}) self.completed = self.state.setdefault('completed', {}) statefiletemplate(self.state) for x, dl in self.downloads.items(): self.times[x] = {} for y, dat in dl.items(): self.times[x][y] = 0 if not dat.get('nat',1): ip = dat['ip'] gip = dat.get('given ip') if gip and is_valid_ipv4(gip) and (not self.only_local_override_ip or is_local_ip(ip)): ip = gip self.becache1.setdefault(x,{})[y] = Bencached(bencode({'ip': ip, 'port': dat['port'], 'peer id': y})) self.becache2.setdefault(x,{})[y] = compact_peer_info(ip, dat['port']) rawserver.add_task(self.save_dfile, self.save_dfile_interval) self.prevtime = time() self.timeout_downloaders_interval = config['timeout_downloaders_interval'] rawserver.add_task(self.expire_downloaders, self.timeout_downloaders_interval) self.logfile = None self.log = None if (config['logfile'] != '') and (config['logfile'] != '-'): try: self.logfile = config['logfile'] self.log = open(self.logfile,'a') sys.stdout = self.log print "# Log Started: ", isotime() except: print "Error trying to redirect stdout to log file:", sys.exc_info()[0] self.allow_get = config['allow_get'] if config['allowed_dir'] != '': self.allowed_dir = config['allowed_dir'] self.parse_allowed_interval = config['parse_allowed_interval'] self.parse_allowed() else: self.allowed = None if unquote('+') != ' ': self.uq_broken = 1 else: self.uq_broken = 0 self.keep_dead = config['keep_dead'] def get(self, connection, path, headers): try: (scheme, netloc, path, pars, query, fragment) = urlparse(path) if self.uq_broken == 1: path = path.replace('+',' ') query = query.replace('+',' ') path = unquote(path)[1:] params = {} for s in query.split('&'): if s != '': i = s.index('=') params[unquote(s[:i])] = unquote(s[i+1:]) except ValueError, e: return (400, 'Bad Request', {'Content-Type': 'text/plain'}, 'you sent me garbage - ' + str(e)) if path == '' or path == 'index.html': s = StringIO() s.write('\n' \ 'BitTorrent download info\n') if self.favicon != None: s.write('\n') s.write('\n\n' \ '
<h3>BitTorrent download info</h3>\n' \
                '<ul>\n' \
                '<li><strong>tracker version:</strong> %s</li>\n' \
                '<li><strong>server time:</strong> %s</li>\n' \
                '</ul>\n' % (version, isotime()))
            names = self.downloads.keys()
            if names:
                names.sort()
                tn = 0
                tc = 0
                td = 0
                tt = 0  # Total transferred
                ts = 0  # Total size
                nf = 0  # Number of files displayed
                uc = {}
                ud = {}
                if self.allowed != None and self.show_names:
                    s.write('<table summary="files" border="1">\n' \
                        '<tr><th>info hash</th><th>torrent name</th><th align="right">size</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th><th align="right">transferred</th></tr>\n')
                else:
                    s.write('<table summary="files">\n' \
                        '<tr><th>info hash</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th></tr>\n')
                for name in names:
                    l = self.downloads[name]
                    n = self.completed.get(name, 0)
                    tn = tn + n
                    lc = []
                    for i in l.values():
                        if type(i) == DictType:
                            if i['left'] == 0:
                                lc.append(1)
                                uc[i['ip']] = 1
                            else:
                                ud[i['ip']] = 1
                    c = len(lc)
                    tc = tc + c
                    d = len(l) - c
                    td = td + d
                    if self.allowed != None and self.show_names:
                        if self.allowed.has_key(name):
                            nf = nf + 1
                            sz = self.allowed[name]['length']  # size
                            ts = ts + sz
                            szt = sz * n  # Transferred for this torrent
                            tt = tt + szt
                            if self.allow_get == 1:
                                linkname = '<a href="/file?info_hash=' + b2a_hex(name) + '">' + self.allowed[name]['name'] + '</a>'
                            else:
                                linkname = self.allowed[name]['name']
                            s.write('<tr><td><code>%s</code></td><td>%s</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n' \
                                % (b2a_hex(name), linkname, size_format(sz), c, d, n, size_format(szt)))
                    else:
                        s.write('<tr><td><code>%s</code></td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td></tr>\n' \
                            % (b2a_hex(name), c, d, n))
                ttn = 0
                for i in self.completed.values():
                    ttn = ttn + i
                if self.allowed != None and self.show_names:
                    s.write('<tr><td align="right" colspan="2">%i files</td><td align="right">%s</td><td align="right">%i/%i</td><td align="right">%i/%i</td><td align="right">%i/%i</td><td align="right">%s</td></tr>\n' \
                        % (nf, size_format(ts), len(uc), tc, len(ud), td, tn, ttn, size_format(tt)))
                else:
                    s.write('<tr><td align="right">%i files</td><td align="right">%i/%i</td><td align="right">%i/%i</td><td align="right">%i/%i</td></tr>\n' \
                        % (nf, len(uc), tc, len(ud), td, tn, ttn))
                s.write('</table>\n' \
                    '<ul>\n' \
                    '<li><em>info hash:</em> SHA1 hash of the "info" section of the metainfo (*.torrent)</li>\n' \
                    '<li><em>complete:</em> number of connected clients with the complete file (total: unique IPs/total connections)</li>\n' \
                    '<li><em>downloading:</em> number of connected clients still downloading (total: unique IPs/total connections)</li>\n' \
                    '<li><em>downloaded:</em> reported complete downloads (total: current/all)</li>\n' \
                    '<li><em>transferred:</em> torrent size * total downloaded (does not include partial transfers)</li>\n' \
                    '</ul>\n')
            else:
                s.write('<p>not tracking any files yet...</p>
    \n') s.write('\n' \ '\n') return (200, 'OK', {'Content-Type': 'text/html; charset=iso-8859-1'}, s.getvalue()) elif path == 'scrape': fs = {} names = [] if params.has_key('info_hash'): if self.downloads.has_key(params['info_hash']): names = [ params['info_hash'] ] # else return nothing else: names = self.downloads.keys() names.sort() for name in names: l = self.downloads[name] n = self.completed.get(name, 0) c = len([1 for i in l.values() if type(i) == DictType and i['left'] == 0]) d = len(l) - c fs[name] = {'complete': c, 'incomplete': d, 'downloaded': n} if (self.allowed is not None) and self.allowed.has_key(name) and self.show_names: fs[name]['name'] = self.allowed[name]['name'] r = {'files': fs} return (200, 'OK', {'Content-Type': 'text/plain'}, bencode(r)) elif (path == 'file') and (self.allow_get == 1) and params.has_key('info_hash') and self.allowed.has_key(a2b_hex(params['info_hash'])): hash = a2b_hex(params['info_hash']) fname = self.allowed[hash]['file'] fpath = self.allowed[hash]['path'] return (200, 'OK', {'Content-Type': 'application/x-bittorrent', 'Content-Disposition': 'attachment; filename=' + fname}, open(fpath, 'rb').read()) elif path == 'favicon.ico' and self.favicon != None: return (200, 'OK', {'Content-Type' : 'image/x-icon'}, self.favicon) if path != 'announce': return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas) try: if not params.has_key('info_hash'): raise ValueError, 'no info hash' if params.has_key('ip') and not is_valid_ipv4(params['ip']): raise ValueError('DNS name or invalid IP address given for IP') infohash = params['info_hash'] if self.allowed != None: if not self.allowed.has_key(infohash): return (200, 'OK', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, bencode({'failure reason': 'Requested download is not authorized for use with this tracker.'})) ip = connection.get_ip() ip_override = 0 if params.has_key('ip') and is_valid_ipv4(params['ip']) and ( not self.only_local_override_ip or is_local_ip(ip)): ip_override = 1 if params.has_key('event') and params['event'] not in ['started', 'completed', 'stopped']: raise ValueError, 'invalid event' port = long(params.get('port', '')) uploaded = long(params.get('uploaded', '')) downloaded = long(params.get('downloaded', '')) left = long(params.get('left', '')) myid = params.get('peer_id', '') if len(myid) != 20: raise ValueError, 'id not of length 20' rsize = self.response_size if params.has_key('numwant'): rsize = min(long(params['numwant']), self.max_give) except ValueError, e: return (400, 'Bad Request', {'Content-Type': 'text/plain'}, 'you sent me garbage - ' + str(e)) peers = self.downloads.setdefault(infohash, {}) self.completed.setdefault(infohash, 0) ts = self.times.setdefault(infohash, {}) confirm = 0 if peers.has_key(myid): myinfo = peers[myid] if myinfo.has_key('key'): if params.get('key') != myinfo['key']: return (200, 'OK', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, bencode({'failure reason': 'key did not match key supplied earlier'})) confirm = 1 elif myinfo['ip'] == ip: confirm = 1 else: confirm = 1 if params.get('event', '') != 'stopped' and confirm: ts[myid] = time() if not peers.has_key(myid): peers[myid] = {'ip': ip, 'port': port, 'left': left} if params.has_key('key'): peers[myid]['key'] = params['key'] if params.has_key('ip') and is_valid_ipv4(params['ip']): peers[myid]['given ip'] = params['ip'] mip = ip if ip_override: mip = params['ip'] if not self.natcheck or ip_override: self.becache1.setdefault(infohash,{})[myid] = 
Bencached(bencode({'ip': mip, 'port': port, 'peer id': myid})) self.becache2.setdefault(infohash,{})[myid] = compact_peer_info(mip, port) else: peers[myid]['left'] = left peers[myid]['ip'] = ip if params.get('event', '') == 'completed': self.completed[infohash] = 1 + self.completed[infohash] if port == 0: peers[myid]['nat'] = 2**30 elif self.natcheck and not ip_override: to_nat = peers[myid].get('nat', -1) if to_nat and to_nat < self.natcheck: NatCheck(self.connectback_result, infohash, myid, ip, port, self.rawserver) else: peers[myid]['nat'] = 0 elif confirm: if peers.has_key(myid): if self.becache1[infohash].has_key(myid): del self.becache1[infohash][myid] del self.becache2[infohash][myid] del peers[myid] del ts[myid] data = {'interval': self.reannounce_interval} if params.get('compact', 0): if rsize == 0: data['peers'] = '' else: cache = self.cache2.setdefault(infohash, []) if len(cache) < rsize: del cache[:] cache.extend(self.becache2.setdefault(infohash, {}).values()) shuffle(cache) del self.cache1.get(infohash, [])[:] data['peers'] = ''.join(cache[-rsize:]) del cache[-rsize:] else: if rsize == 0: data['peers'] = [] else: cache = self.cache1.setdefault(infohash, []) if len(cache) < rsize: del cache[:] cache.extend(self.becache1.setdefault(infohash, {}).values()) shuffle(cache) del self.cache2.get(infohash, [])[:] data['peers'] = cache[-rsize:] del cache[-rsize:] connection.answer((200, 'OK', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, bencode(data))) def connectback_result(self, result, downloadid, peerid, ip, port): record = self.downloads.get(downloadid, {}).get(peerid) if record is None or record['ip'] != ip or record['port'] != port: return if not record.has_key('nat'): record['nat'] = int(not result) else: if result: record['nat'] = 0 else: record['nat'] += 1 if result: self.becache1.setdefault(downloadid,{})[peerid] = Bencached(bencode({'ip': ip, 'port': port, 'peer id': peerid})) self.becache2.setdefault(downloadid,{})[peerid] = compact_peer_info(ip, port) def save_dfile(self): self.rawserver.add_task(self.save_dfile, self.save_dfile_interval) h = open(self.dfile, 'wb') h.write(bencode(self.state)) h.close() def parse_allowed(self): self.rawserver.add_task(self.parse_allowed, self.parse_allowed_interval * 60) self.allowed = parseTorrents(self.allowed_dir) def expire_downloaders(self): for x in self.times.keys(): for myid, t in self.times[x].items(): if t < self.prevtime: if self.becache1.get(x, {}).has_key(myid): del self.becache1[x][myid] del self.becache2[x][myid] del self.times[x][myid] del self.downloads[x][myid] self.prevtime = time() if (self.keep_dead != 1): for key, value in self.downloads.items(): if len(value) == 0: del self.times[key] del self.downloads[key] self.rawserver.add_task(self.expire_downloaders, self.timeout_downloaders_interval) def is_valid_ipv4(ip): try: x = compact_peer_info(ip, 0) if len(x) != 6: return False except (ValueError, IndexError): return False return True def is_local_ip(ip): try: v = [long(x) for x in ip.split('.')] if v[0] == 10 or v[0] == 127 or v[:2] in ([192, 168], [169, 254]): return 1 if v[0] == 172 and v[1] >= 16 and v[1] <= 31: return 1 except ValueError: return 0 def track(args): if len(args) == 0: print formatDefinitions(defaults, 80) return try: config, files = parseargs(args, defaults, 0, 0) except ValueError, e: print 'error: ' + str(e) print 'run with no arguments for parameter explanations' return r = RawServer(Event(), config['timeout_check_interval'], config['socket_timeout']) t = Tracker(config, r) 
    r.bind(config['port'], config['bind'], True)
    r.listen_forever(HTTPHandler(t.get, config['min_time_between_log_flushes']))
    t.save_dfile()
    print '# Shutting down: ' + isotime()

def size_format(s):
    if (s < 1024):
        r = str(s) + 'B'
    elif (s < 1048576):
        r = str(int(s/1024)) + 'KiB'
    elif (s < 1073741824l):
        r = str(int(s/1048576)) + 'MiB'
    elif (s < 1099511627776l):
        r = str(int((s/1073741824.0)*100.0)/100.0) + 'GiB'
    else:
        r = str(int((s/1099511627776.0)*100.0)/100.0) + 'TiB'
    return(r)
PenguinTV-4.2.0/penguintv/ptvbittorrent/DownloaderFeedback.py0000644000000000000000000000544710646750246021320 0ustar # Written by Bram Cohen
# see LICENSE.txt for license information

from time import time

class DownloaderFeedback:
    def __init__(self, choker, add_task, statusfunc, upfunc, downfunc,
            uptotal, downtotal, remainingfunc, leftfunc, file_length,
            finflag, interval, spewflag):
        self.choker = choker
        self.add_task = add_task
        self.statusfunc = statusfunc
        self.upfunc = upfunc
        self.downfunc = downfunc
        self.uptotal = uptotal
        self.downtotal = downtotal
        self.remainingfunc = remainingfunc
        self.leftfunc = leftfunc
        self.file_length = file_length
        self.finflag = finflag
        self.interval = interval
        self.spewflag = spewflag
        self.lastids = []
        self.display()

    def _rotate(self):
        cs = self.choker.connections
        for id in self.lastids:
            for i in xrange(len(cs)):
                if cs[i].get_id() == id:
                    return cs[i:] + cs[:i]
        return cs

    def collect_spew(self):
        l = [ ]
        cs = self._rotate()
        self.lastids = [c.get_id() for c in cs]
        for c in cs:
            rec = {}
            rec["ip"] = c.get_ip()
            if c is self.choker.connections[0]:
                rec["is_optimistic_unchoke"] = 1
            else:
                rec["is_optimistic_unchoke"] = 0
            if c.is_locally_initiated():
                rec["initiation"] = "local"
            else:
                rec["initiation"] = "remote"
            u = c.get_upload()
            rec["upload"] = (int(u.measure.get_rate()), u.is_interested(), u.is_choked())
            d = c.get_download()
            rec["download"] = (int(d.measure.get_rate()), d.is_interested(), d.is_choked(), d.is_snubbed())
            l.append(rec)
        return l

    def display(self):
        self.add_task(self.display, self.interval)
        spew = []
        if self.finflag.isSet():
            status = {"upRate" : self.upfunc(),
                "upTotal" : self.uptotal() / 1048576.0}
            if self.spewflag.isSet():
                status['spew'] = self.collect_spew()
            self.statusfunc(status)
            return
        timeEst = self.remainingfunc()
        if self.file_length > 0:
            fractionDone = (self.file_length - self.leftfunc()) / float(self.file_length)
        else:
            fractionDone = 1
        status = {
            "fractionDone" : fractionDone,
            "downRate" : self.downfunc(),
            "upRate" : self.upfunc(),
            "upTotal" : self.uptotal() / 1048576.0,
            "downTotal" : self.downtotal() / 1048576.0 }
        if timeEst is not None:
            status['timeEst'] = timeEst
        if self.spewflag.isSet():
            status['spew'] = self.collect_spew()
        self.statusfunc(status)
PenguinTV-4.2.0/penguintv/ptvbittorrent/HTTPHandler.py0000644000000000000000000001364610646750246017672 0ustar # Written by Bram Cohen
# see LICENSE.txt for license information

from cStringIO import StringIO
from sys import stdout
import time
from gzip import GzipFile

DEBUG = False

weekdays = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
months = [None, 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
    'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']

class HTTPConnection:
    def __init__(self, handler, connection):
        self.handler = handler
        self.connection = connection
        self.buf = ''
        self.closed = False
        self.done = False
        self.donereading = False
        self.next_func = self.read_type

    def get_ip(self):
        return self.connection.get_ip()

    def data_came_in(self, data):
        if self.donereading or self.next_func is None:
            return True
        self.buf += data
        while True:
            try: i
= self.buf.index('\n') except ValueError: return True val = self.buf[:i] self.buf = self.buf[i+1:] self.next_func = self.next_func(val) if self.donereading: return True if self.next_func is None or self.closed: return False def read_type(self, data): self.header = data.strip() words = data.split() if len(words) == 3: self.command, self.path, garbage = words self.pre1 = False elif len(words) == 2: self.command, self.path = words self.pre1 = True if self.command != 'GET': return None else: return None if self.command not in ('HEAD', 'GET'): return None self.headers = {} return self.read_header def read_header(self, data): data = data.strip() if data == '': self.donereading = True # check for Accept-Encoding: header, pick a if self.headers.has_key('accept-encoding'): ae = self.headers['accept-encoding'] if DEBUG: print "Got Accept-Encoding: " + ae + "\n" else: #identity assumed if no header ae = 'identity' # this eventually needs to support multple acceptable types # q-values and all that fancy HTTP crap # for now assume we're only communicating with our own client if ae.find('gzip') != -1: self.encoding = 'gzip' else: #default to identity. self.encoding = 'identity' r = self.handler.getfunc(self, self.path, self.headers) if r is not None: self.answer(r) return None try: i = data.index(':') except ValueError: return None self.headers[data[:i].strip().lower()] = data[i+1:].strip() if DEBUG: print data[:i].strip() + ": " + data[i+1:].strip() return self.read_header def answer(self, (responsecode, responsestring, headers, data)): if self.closed: return if self.encoding == 'gzip': #transform data using gzip compression #this is nasty but i'm unsure of a better way at the moment compressed = StringIO() gz = GzipFile(fileobj = compressed, mode = 'wb', compresslevel = 9) gz.write(data) gz.close() compressed.seek(0,0) cdata = compressed.read() compressed.close() if len(cdata) >= len(data): self.encoding = 'identity' else: if DEBUG: print "Compressed: %i Uncompressed: %i\n" % (len(cdata),len(data)) data = cdata headers['Content-Encoding'] = 'gzip' # i'm abusing the identd field here, but this should be ok if self.encoding == 'identity': ident = '-' else: ident = self.encoding username = '-' referer = self.headers.get('referer','-') useragent = self.headers.get('user-agent','-') year, month, day, hour, minute, second, a, b, c = time.localtime(time.time()) print '%s %s %s [%02d/%3s/%04d:%02d:%02d:%02d] "%s" %i %i "%s" "%s"' % ( self.connection.get_ip(), ident, username, day, months[month], year, hour, minute, second, self.header, responsecode, len(data), referer, useragent) t = time.time() if t - self.handler.lastflush > self.handler.minflush: self.handler.lastflush = t stdout.flush() self.done = True r = StringIO() r.write('HTTP/1.0 ' + str(responsecode) + ' ' + responsestring + '\r\n') if not self.pre1: headers['Content-Length'] = len(data) for key, value in headers.items(): r.write(key + ': ' + str(value) + '\r\n') r.write('\r\n') if self.command != 'HEAD': r.write(data) self.connection.write(r.getvalue()) if self.connection.is_flushed(): self.connection.shutdown(1) class HTTPHandler: def __init__(self, getfunc, minflush): self.connections = {} self.getfunc = getfunc self.minflush = minflush self.lastflush = time.time() def external_connection_made(self, connection): self.connections[connection] = HTTPConnection(self, connection) def connection_flushed(self, connection): if self.connections[connection].done: connection.shutdown(1) def connection_lost(self, connection): ec = 
self.connections[connection] ec.closed = True del ec.connection del ec.next_func del self.connections[connection] def data_came_in(self, connection, data): c = self.connections[connection] if not c.data_came_in(data) and not c.closed: c.connection.shutdown(1) PenguinTV-4.2.0/penguintv/__init__.py0000644000000000000000000000000711310746416014412 0ustar #blank PenguinTV-4.2.0/penguintv/EntryView.py0000644000000000000000000002776511311770707014634 0ustar import logging import os import htmllib, HTMLParser import formatter import gobject import gtk import ptvDB import html.PTVhtml import utils from penguintv import DEFAULT, MANUAL_SEARCH, TAG_SEARCH, MAJOR_DB_OPERATION import EntryFormatter import Downloader import ThreadPool #states S_DEFAULT = 0 S_SEARCH = 1 class EntryView(gobject.GObject): __gsignals__ = { 'link-activated': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_PYOBJECT])), 'entries-viewed': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_PYOBJECT])) } def __init__(self, widget_tree, feed_list_view, entry_list_view, app, main_window, renderer=EntryFormatter.MOZILLA): gobject.GObject.__init__(self) self._app = app self._mm = self._app.mediamanager self._main_window = main_window self._renderer = renderer #self._renderer = EntryFormatter.GTKHTML #self._moz_realized = False self._state = S_DEFAULT self._auth_info = (-1, "","") #user:pass, url self._widget_tree = widget_tree html_dock = self._widget_tree.get_widget('html_dock') self._scrolled_window = gtk.ScrolledWindow() html_dock.add(self._scrolled_window) self._scrolled_window.set_property("hscrollbar-policy",gtk.POLICY_AUTOMATIC) self._scrolled_window.set_property("vscrollbar-policy",gtk.POLICY_AUTOMATIC) #thanks to straw, again style = html_dock.get_style().copy() self._currently_blank=True self._current_entry={} self._updater_timer=0 self._custom_entry = False self._convert_newlines = (-1, False) self._background_color = "#%.2x%.2x%.2x;" % ( style.base[gtk.STATE_NORMAL].red / 256, style.base[gtk.STATE_NORMAL].blue / 256, style.base[gtk.STATE_NORMAL].green / 256) self._foreground_color = "#%.2x%.2x%.2x;" % ( style.text[gtk.STATE_NORMAL].red / 256, style.text[gtk.STATE_NORMAL].blue / 256, style.text[gtk.STATE_NORMAL].green / 256) self._insensitive_color = "#%.2x%.2x%.2x;" % ( style.base[gtk.STATE_INSENSITIVE].red / 256, style.base[gtk.STATE_INSENSITIVE].blue / 256, style.base[gtk.STATE_INSENSITIVE].green / 256) #for style in [style.fg, style.bg, style.base, style.text, style.mid, style.light, style.dark]: # for category in [gtk.STATE_NORMAL, gtk.STATE_PRELIGHT, gtk.STATE_SELECTED, gtk.STATE_ACTIVE, gtk.STATE_INSENSITIVE]: # print "#%.2x%.2x%.2x;" % (style[category].red / 256, style[category].blue / 256,style[category].green / 256) # print "===========" #const found in __init__ self._css = "" #self.display_custom_entry("") self._entry_formatter = EntryFormatter.EntryFormatter(self._mm) self._search_formatter = EntryFormatter.EntryFormatter(self._mm, True) #self._auto_mark_viewed = self._app.db.get_setting(ptvDB.BOOL, '/apps/penguintv/auto_mark_viewed', True) #signals self._handlers = [] h_id = feed_list_view.connect('no-feed-selected', self.__feedlist_none_selected_cb) self._handlers.append((feed_list_view.disconnect, h_id)) h_id = entry_list_view.connect('no-entry-selected', self.__entrylist_none_selected_cb) self._handlers.append((entry_list_view.disconnect, h_id)) h_id = entry_list_view.connect('entry-selected', self.__entry_selected_cb) self._handlers.append((entry_list_view.disconnect, h_id)) h_id = 
entry_list_view.connect('search-entry-selected', self.__search_entry_selected_cb) self._handlers.append((entry_list_view.disconnect, h_id)) h_id = self._app.connect('entry-updated', self.__entry_updated_cb) self._handlers.append((self._app.disconnect, h_id)) h_id = self._app.connect('render-ops-updated', self.__render_ops_updated_cb) self._handlers.append((self._app.disconnect, h_id)) h_id = self._app.connect('entries-viewed', self.__entries_viewed_cb) self._handlers.append((self._app.disconnect, h_id)) h_id = self._app.connect('entries-unviewed', self.__entries_viewed_cb) self._handlers.append((self._app.disconnect, h_id)) h_id = self._app.connect('state-changed', self.__state_changed_cb) self._handlers.append((self._app.disconnect, h_id)) h_id = self._app.connect('feed-polled', self.__feed_polled_cb) self._handlers.append((self._app.disconnect, h_id)) #h_id = app.connect('setting-changed', self.__setting_changed_cb) #self._handlers.append((app.disconnect, h_id)) if self._renderer == EntryFormatter.MOZILLA: import html.PTVMozilla self._html_widget = html.PTVMozilla.PTVMozilla(self, self._app.db.home, utils.get_share_prefix()) elif self._renderer == EntryFormatter.GTKHTML: import html.PTVGtkHtml self._html_widget = html.PTVGtkHtml.PTVGtkHtml(self, self._app.db.home, utils.get_share_prefix()) def get_display_id(self): try: return self._current_entry['entry_id'] except: return None def get_bg_color(self): return self._background_color def get_fg_color(self): return self._foreground_color def get_in_color(self): return self._insensitive_color def post_show_init(self): html_dock = self._widget_tree.get_widget('html_dock') self._html_widget.post_show_init(self._scrolled_window) self._html_widget.connect('link-message', self.__link_message_cb) self._html_widget.connect('open-uri', self.__open_uri_cb) self._USING_AJAX = self._html_widget.is_ajax_ok() html_dock.show_all() def __feedlist_none_selected_cb(self, o): self.display_item() def __entrylist_none_selected_cb(self, o): self.display_item() def __entry_selected_cb(self, o, entry_id, feed_id): item = self._app.db.get_entry(entry_id) media = self._app.db.get_entry_media(entry_id) if media: item['media']=media else: item['media']=[] #if self._auto_mark_viewed: # if self._app.db.get_flags_for_feed(feed_id) & ptvDB.FF_MARKASREAD: # item['read'] = 1 self.display_item(item) def __search_entry_selected_cb(self, o, entry_id, feed_id, search_query): item = self._app.db.get_entry(entry_id) media = self._app.db.get_entry_media(entry_id) if media: item['media']=media else: item['media']=[] self.display_item(item, search_query) def __entry_updated_cb(self, app, entry_id, feed_id): self.update_if_selected(entry_id, feed_id) def __render_ops_updated_cb(self, app): self._convert_newlines = (-1, False) self.update_if_selected(self._current_entry['entry_id'], self._current_entry['feed_id']) def __entries_viewed_cb(self, app, viewlist): for feed_id, entrylist in viewlist: for e in entrylist: self.update_if_selected(e, feed_id) def __feed_polled_cb(self, app, feed_id, update_data): pass #FIXME: "custom entry" doesn't really work well #if feed_id == self._current_entry['feed_id']: # if update_data['pollfail']: # self.display_custom_entry(""+_("There was an error trying to poll this feed.")+"") # else: # self.undisplay_custom_entry() #def __setting_changed_cb(self, app, typ, datum, value): # if datum == '/apps/penguintv/auto_mark_viewed': # self._auto_mark_viewed = value def __link_message_cb(self, o, message): if not utils.RUNNING_HILDON: 
self._main_window.display_status_message(message) def __open_uri_cb(self, o, uri): self.emit('link-activated', uri) def get_selected(self): if len(self._current_entry) == 0: return None elif not self._currently_blank: return self._current_entry['entry_id'] return None def progress_update(self, entry_id, feed_id): self.update_if_selected(entry_id, feed_id) def update_if_selected(self, entry_id, feed_id): """tests to see if this is the currently-displayed entry, and if so, goes back to the app and asks to redisplay it.""" #item, progress, message = data try: if len(self._current_entry) == 0: return except: print "exception" return if entry_id != self._current_entry['entry_id'] or self._currently_blank: return #assemble the updated info and display item = self._app.db.get_entry(entry_id) media = self._app.db.get_entry_media(entry_id) if media: item['media']=media else: item['media']=[] self.display_item(item) def display_custom_entry(self, message): self._html_widget.render("%s" % message) self._custom_entry = True def undisplay_custom_entry(self): if self._custom_entry: message = "" self._html_widget.render(message) self._custom_entry = False def _unset_state(self): self.display_custom_entry("") def __state_changed_cb(self, app, newstate, data=None): d = {DEFAULT: S_DEFAULT, MANUAL_SEARCH: S_SEARCH, TAG_SEARCH: S_SEARCH, #penguintv.ACTIVE_DOWNLOADS: S_DEFAULT, MAJOR_DB_OPERATION: S_DEFAULT} newstate = d[newstate] if newstate == self._state: return self._unset_state() self._state = newstate def display_item(self, item=None, highlight=""): #when a feed is refreshed, the item selection changes from an entry, #to blank, and to the entry again. We used to lose scroll position because of this. #Now, scroll position is saved when a blank entry is displayed, and if the next #entry is the same id as before the blank, we restore those old values. #we have a bool to figure out if the current page is blank, in which case we shouldn't #save its scroll values. 
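		#illustrative sequence (entry ids assumed): entry 5 is shown; a refresh
		#displays a blank item and entry 5's scroll offsets are saved; entry 5
		#is then redisplayed with the same id, so the offsets are restored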
if item: self._current_entry = item self._currently_blank = False if self._convert_newlines[0] != item['feed_id']: self._convert_newlines = (item['feed_id'], self._app.db.get_flags_for_feed(item['feed_id']) & ptvDB.FF_ADDNEWLINES == ptvDB.FF_ADDNEWLINES) if item['feed_id'] != self._auth_info[0] and self._auth_info[0] != -2: feed_info = self._app.db.get_feed_info(item['feed_id']) if feed_info['auth_feed']: self._auth_info = (item['feed_id'],feed_info['auth_userpass'], feed_info['auth_domain']) else: self._auth_info = (-2, "","") else: self._convert_newlines = (-1, False) self._currently_blank = True if self._state == S_SEARCH: formatter = self._search_formatter if item is not None: item['feed_title'] = self._app.db.get_feed_title(item['feed_id']) else: formatter = self._entry_formatter if item is None: header = """""" % (self._background_color,) else: header = "" html = self._html_widget.build_header(header) if item is None: html += """""" else: html += "%s" % formatter.htmlify_item(item, convert_newlines=self._convert_newlines[1]) #do highlighting for search mode html = html.encode('utf-8') if len(highlight)>0: try: highlight = highlight.replace("*","") p = EntryFormatter.HTMLHighlightParser(highlight) p.feed(html) html = p.new_data except: pass if self._auth_info[0] >= 0: try: p = EntryFormatter.HTMLImgAuthParser(self._auth_info[2], self._auth_info[1]) p.feed(html) html = p.new_data except: pass #print html self._html_widget.render(html, "file:///", self.get_display_id()) if item is not None: gobject.timeout_add(2000, self._do_delayed_set_viewed, item) return def _do_delayed_set_viewed(self, entry): if entry == self._current_entry: if not self._current_entry['read'] and \ not self._current_entry['keep'] and \ len(self._current_entry['media']) == 0: self.emit('entries-viewed', [(self._current_entry['feed_id'], [self._current_entry['entry_id']])]) return False def scroll_down(self): """ Old straw function, _still_ not used. One day I might have "space reading" """ va = self._scrolled_window.get_vadjustment() old_value = va.get_value() new_value = old_value + va.page_increment limit = va.upper - va.page_size if new_value > limit: new_value = limit va.set_value(new_value) return new_value > old_value def finish(self): for disconnector, h_id in self._handlers: disconnector(h_id) self._html_widget.finish() self._custom_entry = True return PenguinTV-4.2.0/penguintv/PTVXapian.py0000644000000000000000000002674010755136075014507 0ustar import os, os.path import utils import logging from threading import Lock, Thread from time import sleep import HTMLParser import xapian try: import sqlite3 as sqlite except: from pysqlite2 import dbapi2 as sqlite """ This class does the searching for PenguinTV. It has full access to its own database object. """ ENTRY_LIMIT=100 DATE = 0 FEED_ID = 1 ENTRY_ID = 2 ENTRY_TITLE = 3 class PTVXapian: _index_lock = Lock() def __init__(self): self.home = utils.get_home() try: os.stat(self.home) except: try: os.mkdir(self.home) except: raise DBError, "error creating directories: "+self.home self._storeDir = os.path.join(self.home, "xapian_store") self.needs_index = False try: os.stat(os.path.join(self._storeDir,"NEEDSREINDEX")) #if that exists, we need to reindex self.needs_index = True except: pass if self.needs_index: try: os.remove(os.path.join(self._storeDir,"NEEDSREINDEX")) except: logging.error("Error removing NEEDSREINDEX... 
check permisions inside %s" % self.home) if not os.path.exists(self._storeDir): os.mkdir(self._storeDir) self.needs_index = True self._quitting = False self._indexing = False def is_indexing(self, only_this_thread=False): if not only_this_thread: if self._index_lock.acquire(False): self._index_lock.release() return False else: return True else: return self._indexing def finish(self, wait=False): if wait: if self.is_indexing(only_this_thread=True): self._index_lock.acquire() self._index_lock.release() self._quitting = True def _interrupt(self): f = open(os.path.join(self._storeDir,"NEEDSREINDEX"),"w") f.close() def _get_db(self): try: if os.path.isfile(os.path.join(self.home,"penguintv4.db")) == False: raise DBError,"database file missing" db=sqlite.connect(os.path.join(self.home,"penguintv4.db"), timeout=10) db.isolation_level="DEFERRED" return db except: raise DBError, "Error connecting to database in Xapian module" def Do_Index_Threaded(self, callback): Thread(target=self.Do_Index, args=(callback,)).start() def Do_Index(self, callback=None): """loop through all feeds and entries and feed them to the beast""" def index_interrupt(): self._indexing = False self._index_lock.release() if callback is not None: callback() self._interrupt() return if not self._index_lock.acquire(False): logging.info("already indexing, not trying to reindex again") return self._indexing = True db = self._get_db() c = db.cursor() #remove existing DB utils.deltree(self._storeDir) database = xapian.WritableDatabase(self._storeDir, xapian.DB_CREATE_OR_OPEN) indexer = xapian.TermGenerator() stemmer = xapian.Stem("english") indexer.set_stemmer(stemmer) c.execute(u"""SELECT id, title, description FROM feeds""") feeds = c.fetchall() c.execute(u"""SELECT id, feed_id, title, description,fakedate FROM entries ORDER BY fakedate""") entries = c.fetchall() c.close() db.close() logging.info("indexing feeds") def feed_index_generator(feeds): for feed_id, title, description in feeds: try: doc = xapian.Document() forindex = title+" "+description #eh? we can only remove docs by term, but we can only #get values. 
so we need both it seems doc.add_term("f"+str(feed_id)) doc.add_value(FEED_ID, str(feed_id)) doc.add_value(DATE, "") doc.set_data(forindex) indexer.set_document(doc) indexer.index_text(forindex) #database.add_document(doc) yield doc except Exception, e: logging.error("Failed in indexDocs, feeds: %s" % str(e)) #sleep(0) #http://twistedmatrix.com/pipermail/twisted-python/2005-July/011052.html for doc in feed_index_generator(feeds): database.add_document(doc) if self._quitting: del database return index_interrupt() logging.info("indexing entries") def entry_index_generator(entries): for entry_id, feed_id, title, description, fakedate in entries: try: doc = xapian.Document() p = HTMLDataParser() p.feed(description) description = p.data forindex = title+" "+description doc.add_term("e"+str(entry_id)) doc.add_term("f"+str(feed_id)) doc.add_value(FEED_ID, str(feed_id)) doc.add_value(ENTRY_ID, str(entry_id)) doc.add_value(ENTRY_TITLE, title) doc.add_value(DATE, str(fakedate)) doc.set_data(forindex) indexer.set_document(doc) indexer.index_text(forindex) #database.add_document(doc) yield doc except Exception, e: logging.error("Failed in indexDocs, entries:" + str(e)) #sleep(.005) for doc in entry_index_generator(entries): database.add_document(doc) if self._quitting: del database return index_interrupt() del database self._indexing = False self._index_lock.release() if callback is not None: callback() def Re_Index_Threaded(self,feedlist=[], entrylist=[]): Thread(target=self.Re_Index, args=(feedlist,entrylist)).start() def Re_Index(self, feedlist=[], entrylist=[]): if len(feedlist) == 0 and len(entrylist) == 0: return def reindex_interrupt(): self._indexing = False self._index_lock.release() self._interrupt() #logging.debug("Reindex interrupted") return #logging.debug("Xapian reindexing: %i, %i" % (len(feedlist), len(entrylist))) self._index_lock.acquire() self._indexing = True db = self._get_db() c = db.cursor() database = xapian.WritableDatabase(self._storeDir, xapian.DB_CREATE_OR_OPEN) indexer = xapian.TermGenerator() stemmer = xapian.Stem("english") indexer.set_stemmer(stemmer) #feedlist = utils.uniquer(feedlist) entrylist = utils.uniquer(entrylist) #feed_addition = [] entry_addition = [] #for feed_id in feedlist: # if self._quitting: # del database # return reindex_interrupt() # try: # c.execute(u"""SELECT title, description FROM feeds WHERE id=?""",(feed_id,)) # title, description = c.fetchone() # feed_addition.append((feed_id, title, description)) # except TypeError: # pass #it won't be readded. 
Assumption is we have deleted this feed for entry_id in entrylist: if self._quitting: del database return reindex_interrupt() try: c.execute(u"""SELECT feed_id, title, description, fakedate FROM entries WHERE id=?""",(entry_id,)) feed_id, title, description, fakedate = c.fetchone() entry_addition.append((entry_id, feed_id, title, description, fakedate)) except TypeError: pass c.close() db.close() entry_addition = utils.uniquer(entry_addition) if self._quitting: del database return reindex_interrupt() #first delete anything deleted or changed #for feed_id in feedlist: # try: # database.delete_document("f"+str(feed_id)) # except Exception, e: # logging.error("Failed deleting feed: %s" % str(e)) for entry_id in entrylist: try: database.delete_document("e"+str(entry_id)) except Exception, e: logging.error("Failed deleting entry: %s" % str(e)) #now add back the changes #print [f[0] for f in feed_addition] #for feed_id, title, description in feed_addition: # if self._quitting: # del database # return reindex_interrupt() # try: # doc = xapian.Document() # # forindex = title+" "+description # # doc.add_term("f"+str(feed_id)) # doc.add_value(FEED_ID, str(feed_id)) # doc.add_value(DATE, "") # # doc.set_data(forindex) # indexer.set_document(doc) # indexer.index_text(forindex) # # database.add_document(doc) # except Exception, e: # logging.error("Failed adding feed: %s" % str(e)) #print [(e[0],e[1]) for e in entry_addition] for entry_id, feed_id, title, description, fakedate in entry_addition: if self._quitting: del database return reindex_interrupt() try: doc = xapian.Document() p = HTMLDataParser() p.feed(description) description = p.data forindex = title+" "+description doc.add_term("e"+str(entry_id)) doc.add_term("f"+str(feed_id)) doc.add_value(FEED_ID, str(feed_id)) doc.add_value(ENTRY_ID, str(entry_id)) doc.add_value(ENTRY_TITLE, title) doc.add_value(DATE, str(fakedate)) doc.set_data(forindex) indexer.set_document(doc) indexer.index_text(forindex) database.add_document(doc) except Exception, e: logging.error("Failed adding entry: %s" % str(e)) del database self._indexing = False self._index_lock.release() #logging.debug("Reindex complete") def Search(self, command, blacklist=[], include=['feeds','entries'], since=0): """returns two lists, one of search results in feeds, and one for results in entries. It is sorted so that title results are first, description results are second""" if not self._index_lock.acquire(False): #if we are indexing, don't try to search #print "wouldn't get lock" return ([],[]) self._index_lock.release() database = xapian.Database(self._storeDir) enquire = xapian.Enquire(database) qp = xapian.QueryParser() stemmer = xapian.Stem("english") qp.set_stemmer(stemmer) qp.set_database(database) qp.set_stemming_strategy(xapian.QueryParser.STEM_SOME) enquire.set_docid_order(xapian.Enquire.DESCENDING) enquire.set_weighting_scheme(xapian.BoolWeight()) # Display the results. #print "%i results found." 
% matches.get_matches_estimated() #print "Results 1-%i:" % matches.size() #for m in matches: # print "%i: %i%% docid=%i [%s] %s %s %s" % (m.rank + 1, m.percent, m.docid, m.document.get_data()[0:100], m.document.get_value(0), m.document.get_value(1), m.document.get_value(2)) feed_results=[] entry_results=[] query = qp.parse_query(command) enquire.set_query(query) matches = enquire.get_mset(0, 100) for m in matches: doc = m.document feed_id = doc.get_value(FEED_ID) feed_id = int(feed_id) try: if feed_id not in blacklist: entry_id = doc.get_value(ENTRY_ID) if entry_id is '': # meaning this is actually a feed (we could know that from above, but eh) feed_results.append(int(feed_id)) else: # meaning "entry" title = doc.get_value(ENTRY_TITLE) fakedate = float(doc.get_value(DATE)) / 1000.0 if fakedate > since: entry_results.append((int(entry_id),title, fakedate, feed_id)) #else: # print "excluding:"+doc.get("title") except Exception, e: print e print feed_id print blacklist for entry in entry_results: feed_results.append(entry[3]) feed_results = utils.uniquer(feed_results) entry_results = utils.uniquer(entry_results) #need to resort because we merged two lists together entry_results.sort(lambda x,y: int(y[2] - x[2])) #for e in entry_results: # print e[2],e[1] return (feed_results, entry_results) def merge(self, l1, l2): """merges two sorted lists""" if len(l1)>len(l2): l3 = l1 l1 = l2 l2 = l3 del l3 i=-1 for term,freq in l1: i+=1 while term > l2[i][0]: i+=1 if i>=len(l2):break if i >= len(l2): l2.append((term,freq)) break if term == l2[i][0]: l2[i] = (l2[i][0], l2[i][1] + freq) if term < l2[i][0]: l2.insert(i,(term,freq)) return l2 class DBError(Exception): def __init__(self,error): self.error = error def __str__(self): return self.error class HTMLDataParser(HTMLParser.HTMLParser): def __init__(self): HTMLParser.HTMLParser.__init__(self) self.data = "" def handle_data(self, data): self.data+=data PenguinTV-4.2.0/penguintv/OfflineImageCache.py0000644000000000000000000002440211275326304016131 0ustar ### OfflineImageCache # # Implements local image caching for feeds, transparently to the rest of the # application. # # Has a threadpool. Takes html and a GUID (entry_id) and downloads all the # images to a storage location. Hashes filenames based on url to prevent # collisions. # # Uses BeautifulSoup to rewrite html at render time to replace image tags with # locally cached urls. # # Need better checking of disk space, etc import urllib import urlparse import os, os.path import pickle import logging import glob import hashlib #requires python2.5 import time import gobject import utils import ThreadPool from BeautifulSoup.BeautifulSoup import BeautifulSoup DEBUG = False def guid_hash(guid): return str(hash(guid) % 20) def threaded_callback(): def annotate(func): def _exec_cb(self, *args, **kwargs): def timeout_func(self, func, *args, **kwargs): func(self, *args, **kwargs) return False if DEBUG: func(self, *args, **kwargs) else: gobject.idle_add(timeout_func, self, func, *args, **kwargs) return _exec_cb return annotate class OfflineImageCache: def __init__(self, store_location): #logging.debug("OFFLINE IMAGE CACHE STARTUP") self._store_location = store_location self._threadpool = ThreadPool.ThreadPool(5 ,"OfflineImageCache") self._cachers = {} # dict of urlcacher objects? 
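        # on-disk layout implied by guid_hash() and UrlCacher below: each
        # entry's images land in
        #   <store_location>/<hash(guid) % 20>/<guid>-<md5(url)><ext>
        # next to a <guid>-mapping.pickle that maps original URLs to the
        # local files and their download status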
    def cache_html(self, guid, html):
        guid = str(guid)
        if self._cachers.has_key(guid):
            if self._cachers[guid]:
                #logging.debug("that cacher is already active, ignoring, %s" % str(guid))
                return
        self._cachers[guid] = True
        page_cacher = PageCacher(guid, html, self._store_location,
            self._threadpool, self._cache_cb)

    def _cache_cb(self, guid):
        guid = str(guid)
        self._cachers[guid] = False

    def rewrite_html(self, guid, html=None, ajax_url=None):
        """if we are not using ajax, then html is IGNORED and we go by the
           cached copy.  html is sometimes used to see if there should be a
           cached copy at all, or if something goes wrong and we just need
           to return unaltered html """
        guid = str(guid)
        cache_dir = os.path.join(self._store_location, guid_hash(guid))
        mapping_file = os.path.join(cache_dir, guid + "-" + "mapping.pickle")
        if not os.path.isfile(mapping_file):
            # quick and dirty check.  are there images?  if not, plain
            # html is fine.  (the literal tag was lost from the original
            # text here; '<img' is the assumed search string)
            if html.lower().find('<img') >= 0:
                #logging.warning("Should be downloaded images, but couldn't open mapping. Recaching")
                self.cache_html(guid, html)
            return html
        try:
            mapping = open(mapping_file, 'r')
            rewrite_hash = pickle.load(mapping)
            non_ajax_html = pickle.load(mapping)
            mapping.close()
        except:
            logging.error("error opening cache pickle for guid %s %s" % (guid, mapping_file))
            logging.error("If you have upgraded penguintv, you might need to delete your image cache")
            return html
        if ajax_url is None:
            return non_ajax_html
        #else, rewrite on the fly
        soup = BeautifulSoup(html)
        img_tags = soup.findAll('img')
        if len(img_tags) == 0:
            return html
        for result in img_tags:
            # believe it or not, some img tags don't have a src, they have an id
            # that points to CSS.  At least I think that's what's going on
            if result.has_key('src'):
                if rewrite_hash.has_key(result['src']):
                    if rewrite_hash[result['src']][1] == UrlCacher.DOWNLOADED:
                        #if os.path.isfile(os.path.join(self._store_location, rewrite_hash[result['src']][0])):
                        result['src'] = ajax_url + "/cache/" + rewrite_hash[result['src']][0]
                        #else:
                        #    logging.warning("file not found, not replacing")
                        #    logging.debug("(should we attempt to recache here?")
        return soup.prettify()

    def remove_cache(self, guid):
        guid = str(guid)
        cache_dir = os.path.join(self._store_location, guid_hash(guid))
        mapping_file = os.path.join(cache_dir, guid + "-" + "mapping.pickle")
        if not os.path.isdir(cache_dir):
            # it was never cached I guess
            return
        if not os.path.isfile(mapping_file):
            # the dir exists, but not the file?
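            # without the mapping pickle there is no record of which cached
            # files belong to this guid, so bail out rather than guess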
#logging.warning("no mapping file, not deleting anything") return try: mapping = open(mapping_file, 'r') except: logging.error("error opening cache pickle for guid %s %s" % (guid, mapping_file)) return rewrite_hash = pickle.load(mapping) mapping.close() os.remove(mapping_file) for url in rewrite_hash.keys(): try: os.remove(os.path.join(self._store_location, rewrite_hash[url][0])) except Exception, e: logging.warning("error removing file: %s" % str(e)) try: #os.rmdir(cache_dir) #utils.deltree(cache_dir) for f in glob.glob(os.path.join(cache_dir, guid + "-*")): os.remove(f) except Exception, e: #import glob logging.warning("error while removing image cache %s" % str(e)) #logging.debug(glob.glob(os.path.join(cache_dir, "*"))) #logging.debug(str(rewrite_hash)) def cleanup(self): logging.info("Cleaning out old cached images") now = time.time() for root,d,files in os.walk(self._store_location): if root != self._store_location: for f in files: try: date = os.stat(os.path.join(root,f)).st_mtime if now - date > 60*60*24*30: #one month logging.info("deleting %s" % os.path.join(root,f)) #print ("deleting %s" % os.path.join(root,f)) os.remove(os.path.join(root,f)) except: pass def finish(self): self._threadpool.joinAll(False, False) class PageCacher: """ Take html and download all of the images to the store location. Then process the html and rewrite the tags to point to these images. The new html is then cached along with the image mapping in a pickle file. Note: this cached version of the html is only good for non-ajax use. In the case of ajax, the urls need to be rewritten on the fly """ def __init__(self, guid, html, store_location, threadpool, finished_cb=None): self._guid = str(guid) self._store_location = store_location self._threadpool = threadpool self._finished_cb = finished_cb self._soup = None self._cacher = UrlCacher(self._guid, self._store_location, self._threadpool, self._page_cached_cb) self._cache_dir = os.path.join(self._store_location, guid_hash(self._guid)) try: os.remove(os.path.join(self._cache_dir, self._guid + "-" + "mapping.pickle")) except: pass self._threadpool.queueTask(self._get_soup, html, taskCallback=self.process) def _get_soup(self, html): return BeautifulSoup(html) def process(self, soup): # go through html and pull out images, feed them into cacher self._soup = soup for result in self._soup.findAll('img'): if result.has_key('src'): self._cacher.queue_download(result['src']) self._cacher.start_downloads() def _page_cached_cb(self): rewrite_hash = self._cacher.get_rewrite_hash() try: mapping = open(os.path.join(self._cache_dir, self._guid + "-" + "mapping.pickle"), 'w') except: logging.error("error writing mapping %s" % os.path.join(self._cache_dir, self._guid + "-" + "mapping.pickle")) self._finished_cb(self._guid) return img_tags = self._soup.findAll('img') for result in img_tags: # believe it or not, some img tags don't have a src, they have an id # that points to CSS. 
At least I think that's what's going on if result.has_key('src'): if rewrite_hash.has_key(result['src']): if rewrite_hash[result['src']][1] == UrlCacher.DOWNLOADED: #if os.path.isfile(os.path.join(self._store_location, rewrite_hash[result['src']][0])): result['src'] = "file://" + os.path.join(self._store_location, rewrite_hash[result['src']][0]) # logging.warning("file not found, not replacing") # logging.debug("(should we attempt to recache here?") non_ajax_html = self._soup.prettify() pickle.dump(rewrite_hash, mapping) pickle.dump(non_ajax_html, mapping) mapping.close() self._finished_cb(self._guid) class UrlCacher: FAIL = -1 NOT_DOWNLOADED = 0 DOWNLOADED = 1 RETRY_LIMIT = 1 def __init__(self, guid, store_location, threadpool, finished_cb): self._guid = str(guid) self._store_location = store_location self._threadpool = threadpool self._finished_cb = finished_cb self._cache_status = {} #a dict of url: [localfile, status] self._dir_checked = False def queue_download(self, url): if self._cache_status.has_key(url): return md5 = hashlib.md5() md5.update(url) filename = urlparse.urlparse(url)[2] extension = os.path.splitext(filename)[1] local_filename = os.path.join(guid_hash(self._guid), self._guid + "-" + md5.hexdigest()) + extension if os.path.isfile(local_filename): #TODO: some sort of md5sum of the file? or just assume it's ok? #urlretrieve probably guarantees success or it doesn't write the file self._cache_status[url] = [local_filename, UrlCacher.DOWNLOADED] else: self._cache_status[url] = [local_filename, UrlCacher.NOT_DOWNLOADED] def start_downloads(self): for url in self._cache_status.keys(): if self._cache_status[url][1] != UrlCacher.DOWNLOADED: self._threadpool.queueTask(self._download_image, (url, self._cache_status[url][0]), self._download_complete) def get_rewrite_hash(self): return self._cache_status.copy() def _download_image(self, args, retry=0): """ downloads an image at url, and stores it as local filename. threaded""" url, local_filename = args cache_dir = os.path.join(self._store_location, guid_hash(self._guid)) if not self._dir_checked: if not os.path.exists(cache_dir): try: os.makedirs(cache_dir) except: pass self._dir_checked = True try: urllib.urlretrieve(url, os.path.join(self._store_location, local_filename)) except: #TODO: any need to check if we have to delete half-dled file? 
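            # urlretrieve() raised, so retry by recursing until RETRY_LIMIT
            # attempts have been made, then hand (url, False) back so
            # _download_complete() can mark the url as FAIL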
if retry >= UrlCacher.RETRY_LIMIT: return (url, False) else: return self._download_image(args, retry+1) return (url, True) @threaded_callback() def _download_complete(self, args): url, success = args if success: #logging.debug("Downloaded %s" % url) self._cache_status[url][1] = UrlCacher.DOWNLOADED else: #logging.debug("Failed to downloaded %s" % url) self._cache_status[url][1] = UrlCacher.FAIL self._check_finished() def _check_finished(self): for url in self._cache_status.keys(): if self._cache_status[url][1] == UrlCacher.NOT_DOWNLOADED: return # does not guarantee success, just that we tried self._finished_cb() PenguinTV-4.2.0/penguintv/Player.py0000644000000000000000000001106111371366574014123 0ustar # Written by Owen Williams # see LICENSE for license information import subProcess import utils import os, os.path import urllib import logging from types import * from MainWindow import N_PLAYER import gobject class Player: def __init__(self, app, gst_player=None): self._app = app self._gst_player = gst_player self.cmdline = 'totem --enqueue' if utils.RUNNING_SUGAR: import sugar.env home = os.path.join(sugar.env.get_profile_path(), 'penguintv') else: home = os.path.join(os.getenv('HOME'), ".penguintv") try: os.stat(os.path.join(home, 'media')) except: try: os.mkdir(os.path.join(home, 'media')) except: raise NoDir, "error creating " +os.path.join(home,'/media') self.media_dir = os.path.join(home, 'media') try: playlist = open(os.path.join(self.media_dir,"recovery_playlist.m3u") , "w") playlist.write("#EXTM3U\n") playlist.close() except: print "Warning: couldn't append to playlist file", os.path.join(self.media_dir,"recovery_playlist.m3u") pass def using_internal_player(self): return self._gst_player != None def internal_player_exposed(self): return self._gst_player.is_exposed() def connect_internal(self, signal, func): assert self.using_internal_player() self._gst_player.connect(signal, func) def control_internal(self, action): assert self.using_internal_player() def _expose_check_generator(q_action): """Wait for player to become exposed, then play""" for i in range(0,10): if self.internal_player_exposed(): self.control_internal(q_action) yield True break yield False yield False if self.using_internal_player(): if not self.internal_player_exposed(): self._app.main_window.notebook_select_page(N_PLAYER) gobject.timeout_add(200, _expose_check_generator(action).next) return if action.lower() == "play": self._gst_player.play() elif action.lower() == "pause": self._gst_player.pause() elif action.lower() == "next": self._gst_player.next() elif action.lower() in ("prev", "previous"): self._gst_player.prev() elif action.lower() == "playpause": self._gst_player.play_pause_toggle() elif action.lower() == "stop": self._gst_player.stop() else: print "unhandled action:",action def get_queue(self): assert self.using_internal_player() return self._gst_player.get_queue() def unqueue(self, userdata): if self.using_internal_player(): self._gst_player.unqueue(userdata=userdata) def play(self, f, title=None, userdata=None, force_external=False, context=None): self.play_list([[f,title,userdata]], force_external, context) def play_list(self, files, force_external = False, context=None): cmdline = self.cmdline try: playlist = open(os.path.join(self.media_dir,"recovery_playlist.m3u") , "a") playlist.write("#"*20+"\n") except: print "Warning: couldn't append to playlist file", os.path.join(self.media_dir,"recovery_playlist.m3u") players={} for f,t,u in files: if os.path.isdir(f): for root,dirs,filelist in 
os.walk(f): for filen in filelist: next = os.path.join(f, filen) if os.path.isfile(next) and utils.is_known_media(filen): head,filename = os.path.split(next) dated_dir = os.path.split(head)[1] playlist.write(os.path.join(dated_dir, filename)+"\n") player = utils.get_play_command_for(filen) if players.has_key(player): players[player].append(filen) else: players[player]=[filen] elif os.path.isfile(f): head,filename = os.path.split(f) dated_dir = os.path.split(head)[1] playlist.write(os.path.join(dated_dir,filename)+"\n") player = utils.get_play_command_for(f) if players.has_key(player): players[player].append(f) else: players[player]=[f] playlist.close() if self._gst_player is not None and not force_external: for f,t,u in files: self._gst_player.queue_file(f,name=t,userdata=u) else: if utils.RUNNING_HILDON: import osso.rpc rpc_handler = osso.rpc.Rpc(context) for filename,t,u in files: uri = str("file://" + filename) logging.debug("Trying to launch media player: %s" % uri) rpc_handler.rpc_run_with_defaults('mediaplayer', 'mime_open', (uri,)) else: for player in players.keys(): cmdline=player+" " for filename in players[player]: cmdline+=filename+" " cmdline+="&" #print "running: "+str(cmdline) subProcess.subProcess(cmdline) class NoDir(Exception): def __init__(self,durr): self.durr = durr def __str__(self): return "no such directory: "+self.durr PenguinTV-4.2.0/penguintv/AddFeedDialog.py0000644000000000000000000001567011140613163015254 0ustar # Written by Owen Williams # see LICENSE for license information import gtk #import urllib , urlparse loaded as needed import socket import gettext import os.path import traceback import sys import logging #loaded as needed #import feedparser import HTMLParser import utils import AddFeedUtils from ptvDB import FF_NOAUTODOWNLOAD, FF_NOSEARCH, FF_NOAUTOEXPIRE, \ FF_NOTIFYUPDATES, FF_ADDNEWLINES, FF_MARKASREAD, \ FF_NOKEEPDELETED import LoginDialog if utils.HAS_PYXML: import itunes _=gettext.gettext class AddFeedDialog: def __init__(self,xml,app): self._xml = xml self._app = app self._window = xml.get_widget("window_add_feed") if not utils.RUNNING_SUGAR and not utils.RUNNING_HILDON: self._window.set_transient_for(self._app.main_window.get_parent()) for key in dir(self.__class__): if key[:3] == 'on_': self._xml.signal_connect(key, getattr(self,key)) if not utils.RUNNING_SUGAR: self._edit_tags_widget = self._xml.get_widget("edit_tags_widget") self._feed_url_widget = self._xml.get_widget("feed_url") else: combo = self._xml.get_widget("feed_combo") self._feed_url_widget = combo.child combo.connect('changed', self.on_sugar_combo_changed) self._edit_tags_widget = None def extract_content(self): box = self._xml.get_widget('add_feed_box') box.unparent() box.show_all() self._window = None return box def show(self, autolocation=True): if utils.RUNNING_HILDON: self._window.resize(650,300) self._window.show_all() self._feed_url_widget.grab_focus() if self._window: self._window.show() self._feed_url_widget.set_text("") if autolocation: self.set_location_automatically() self._edit_tags_widget.set_text("") #if not utils.USE_TAGGING: # l = self._xml.get_widget("add_feed_label") # l.set_text(_("Please enter the URL of the feed you would like to add:")) # self._xml.get_widget("tag_hbox").hide() if not utils.HAS_SEARCH: self._xml.get_widget('b_search').hide() if utils.RUNNING_SUGAR or not utils.HAS_STATUS_ICON: self._xml.get_widget('b_notifyupdates').hide() #ripped from straw def set_location_automatically(self): def _clipboard_cb(cboard, text, data=None): if text: if 
text.upper().startswith("FEED:") or \ text.upper().startswith("HTTP"): self._feed_url_widget.set_text(text) clipboard = gtk.clipboard_get(selection="CLIPBOARD") clipboard.request_text(_clipboard_cb, None) def set_location(self, url=""): self._feed_url_widget.set_text(url) #(olpc) Sugar-only def set_existing_feeds(self, existing_list): assert utils.RUNNING_SUGAR model = gtk.ListStore(int, str, str) #id, title, url combo = self._xml.get_widget("feed_combo") old_model = combo.get_model() for feed_id, title, url in existing_list: model.append([feed_id, title, url]) combo.set_model(model) combo.set_text_column(1) del old_model def on_window_add_feed_delete_event(self, widget, event): if self._window: return self._window.hide_on_delete() def hide(self): self._feed_url_widget.set_text("") if self._window: self._window.hide() def finish(self): flags = 0 if not utils.RUNNING_SUGAR: #reversed if not self._xml.get_widget('b_download').get_active(): flags += FF_NOAUTODOWNLOAD #reversed if not self._xml.get_widget('b_search').get_active(): flags += FF_NOSEARCH if self._xml.get_widget('b_notifyupdates').get_active(): flags += FF_NOTIFYUPDATES if self._xml.get_widget('b_noautoexpire').get_active(): flags += FF_NOAUTOEXPIRE if self._xml.get_widget('b_addnewlines').get_active(): flags += FF_ADDNEWLINES if self._xml.get_widget('b_markasread').get_active(): flags += FF_MARKASREAD if self._xml.get_widget('b_nokeepdeleted').get_active(): flags += FF_NOKEEPDELETED tags=[] if len(self._edit_tags_widget.get_text()) > 0: for tag in self._edit_tags_widget.get_text().split(','): tags.append(tag.strip()) url = self._feed_url_widget.get_text() if self._window: self._window.set_sensitive(False) while gtk.events_pending(): #make sure the sensitivity change goes through gtk.main_iteration() try: logging.debug("getting corrected url:") url,title = AddFeedUtils.correct_url(url, utils.get_glade_prefix()) logging.debug("got: %s %s" % (url, title)) if url is None: if self._window: self._window.set_sensitive(True) return feed_id = self._app.add_feed(url, title, tags) self._app.db.set_flags_for_feed(feed_id, flags) except AddFeedUtils.AuthorizationFailed: dialog = gtk.Dialog(title=_("Authorization Required"), parent=None, flags=gtk.DIALOG_MODAL, buttons=(gtk.STOCK_OK, gtk.RESPONSE_ACCEPT)) label = gtk.Label(_("You must specify a valid username and password in order to add this feed.")) dialog.vbox.pack_start(label, True, True, 0) label.show() response = dialog.run() dialog.hide() del dialog if self._window: self._window.set_sensitive(True) return except AddFeedUtils.AuthorizationCancelled: if self._window: self._window.set_sensitive(True) return except AddFeedUtils.BadFeedURL, e: logging.error(str(e)) dialog = gtk.Dialog(title=_("No Feed in Page"), parent=None, flags=gtk.DIALOG_MODAL, buttons=(gtk.STOCK_OK, gtk.RESPONSE_ACCEPT)) label = gtk.Label(_("PenguinTV couldn't find a feed in the web page you provided.\nYou will need to find the RSS feed link in the web page yourself. Sorry.")) dialog.vbox.pack_start(label, True, True, 0) label.show() response = dialog.run() dialog.hide() del dialog if self._window: self._window.set_sensitive(True) return #except: # self._window.set_sensitive(True) # return if self._window: self._window.set_sensitive(True) if feed_id == -1: return #don't hide, give them a chance to try again. 
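        #feed was added successfully, so the dialog can go away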
self.hide() def on_help_button_clicked(self, event): dialog = gtk.Dialog(title=_("Add Feed Help"), parent=None, flags=gtk.DIALOG_MODAL, buttons=(gtk.STOCK_OK, gtk.RESPONSE_ACCEPT)) hbox = gtk.HBox() hbox.set_spacing(12) image = gtk.image_new_from_stock(gtk.STOCK_DIALOG_INFO, gtk.ICON_SIZE_DIALOG) hbox.pack_start(image, False, False, 12) label = gtk.Label(_("""Please enter the URL of the feed you would like to add. Also enter any tags you would like to apply to this feed, separated by commas""")) label.set_line_wrap(True) hbox.pack_start(label, True, True, 0) dialog.vbox.pack_start(hbox, True, True, 0) dialog.show_all() dialog.resize(400,-1) response = dialog.run() dialog.hide() del dialog def on_button_ok_clicked(self,event): self.finish() def on_feed_url_activate(self, event): self.finish() def on_edit_tags_widget_activate(self, event): self.finish() def on_button_cancel_clicked(self,event): self.hide() def on_sugar_combo_changed(self, combo): model = combo.get_model() active = combo.get_active() if active == -1: return self._feed_url_widget.set_text(model[active][2]) PenguinTV-4.2.0/penguintv/PtvTrayIcon.py0000644000000000000000000002005411071027525015076 0ustar import gtk import gobject import logging import ptvDB from Downloader import FINISHED, FINISHED_AND_PLAY import trayicon.TrayIcon import IconManager import utils import MainWindow NOTIFY_ENTRY = 0 NOTIFY_DOWNLOAD = 1 class PtvTrayIcon: def __init__(self, app, icon): self._app = app self._app.connect('feed-polled', self._feed_polled_cb) self._app.connect('notify-tags-changed', self._update_notification_feeds) self._app.connect('download-finished', self._download_finished_cb) self._app.connect('app-loaded', self._app_loaded_cb) self._app.connect('setting-changed', self.__setting_changed_cb) self._updates = [] self._updater_id = -1 self._notification_feeds = [] self._update_notification_feeds() self._icon_manager = IconManager.IconManager(self._app.db.home) self._show_notifications = self._app.db.get_setting(ptvDB.BOOL, '/apps/penguintv/show_notifications', True) #Set up the right click menu menu = """ """ actions = [ ('Menu', None, 'Menu'), ('Play', gtk.STOCK_MEDIA_PLAY, _('_Play'), None, _('Play Media'), self.__play_cb), ('Pause', gtk.STOCK_MEDIA_PAUSE, _('_Pause'), None, _('Pause Media'), self.__pause_cb), ('Refresh', gtk.STOCK_REFRESH, _('_Refresh'), None, _('Refresh feeds'), self.__refresh_cb), ('About', gtk.STOCK_ABOUT, _('_About'), None, _('About PenguinTV'), self.__about_cb), ('Quit', gtk.STOCK_QUIT, _('_Quit'), None, _('Quit PenguinTV'), self.__quit_cb) ] actiongroup = gtk.ActionGroup('Actions') actiongroup.add_actions(actions) actions = [ ('ShowNotifications', None, _('Show Notifications'), None, _('Show feed and download updates'), self.__toggle_notifs_cb, self._show_notifications) ] actiongroup.add_toggle_actions(actions) #Use UIManager to turn xml into gtk menu self.manager = gtk.UIManager() self.manager.insert_action_group(actiongroup, 0) self.manager.add_ui_from_string(menu) menu = self.manager.get_widget('/Menubar/Menu/About').props.parent show_always = self._app.db.get_setting(ptvDB.BOOL, '/apps/penguintv/show_notification_always', True) self._tray_icon = trayicon.TrayIcon.StatusTrayIcon(icon, menu, show_always) self._tray_icon.connect('notification-clicked', self._notification_clicked_cb) d = {'version': utils.VERSION} self._tray_icon.set_tooltip(_("PenguinTV Version %(version)s") % d) play, pause = self._get_playpause_menuitems() play.hide() pause.hide() self._player_showing = False def set_parent(self, p): 
self._tray_icon.set_parent(p) def set_show_always(self, b): self._tray_icon.set_show_always(b) def set_tooltip(self, m): if len(m) == 0: d = {'version': utils.VERSION} self._tray_icon.set_tooltip(_("PenguinTV Version %(version)s") % d) else: self._tray_icon.set_tooltip(m) def clear_notifications(self): self._updates = [] self._tray_icon.clear_notifications() def __setting_changed_cb(self, app, typ, datum, value): if datum == '/apps/penguintv/show_notifications': show_notifs_item = self.manager.get_widget('/Menubar/Menu/ShowNotifications') if value != show_notifs_item.get_active(): show_notifs_item.set_active(value) self._show_notifications = value if value == False: self.clear_notifications() def _app_loaded_cb(self, app): logging.debug("ptvtrayicon gets app-loaded event") play, pause = self._get_playpause_menuitems() if self._app.player.using_internal_player(): logging.debug("connecting to gstreamer player") self._app.player.connect_internal('playing', self.__gst_playing_cb) self._app.player.connect_internal('paused', self.__gst_paused_cb) if len(self._app.player.get_queue()) > 0: play.show() pause.hide() else: play.hide() pause.hide() self._app.main_window.connect('player-show', self.__gst_player_show_cb) self._app.main_window.connect('player-hide', self.__gst_player_hide_cb) def _update_notification_feeds(self, obj=None): self._notification_feeds = self._app.db.get_feeds_for_flag(ptvDB.FF_NOTIFYUPDATES) def _download_finished_cb(self, app, d): if (d.status == FINISHED or d.status == FINISHED_AND_PLAY) and \ self._show_notifications: entry = self._app.db.get_entry(d.media['entry_id']) entry_title = utils.my_quote(entry['title']) feed_title = self._app.db.get_feed_title(entry['feed_id']) feed_title = utils.my_quote(feed_title) icon = self._icon_manager.get_icon(entry['feed_id']) title = _("Download Complete") d2 = {'feed_title':feed_title, 'entry_title':entry_title, 'size': utils.format_size(d.total_size)} message = _("%(feed_title)s: %(entry_title)s" % d2) self._tray_icon.display_notification(title, message, icon, (NOTIFY_DOWNLOAD, d.media['media_id'])) def _feed_polled_cb(self, app, feed_id, update_data): try: new_entries = update_data['new_entries'] except: return ## debug: guarantee notification #if new_entries == 0: # new_entries = 10 if feed_id in self._notification_feeds and self._show_notifications: entries = self._app.db.get_entrylist(feed_id)[0:new_entries] entries = [(feed_id,e[0]) for e in entries] entries.reverse() if len(self._updates) >= 10: self._updates += entries[-2:] else: self._updates += entries[-7:] # seven max if self._updater_id == -1: self._updater_id = gobject.idle_add(self._push_update_handler) def _push_update_handler(self): if len(self._updates) == 0 or not self._show_notifications: self._updater_id = -1 return False feed_id, entry_id = self._updates.pop(0) feed_title = self._app.db.get_feed_title(feed_id) entry = self._app.db.get_entry(entry_id) icon = self._icon_manager.get_icon(feed_id) feed_title = utils.my_quote(feed_title) entry_title = utils.my_quote(entry['title']) self._tray_icon.display_notification(feed_title, entry_title, icon, (NOTIFY_ENTRY, entry)) return True def _notification_clicked_cb(self, obj, userdata): if userdata[0] == NOTIFY_DOWNLOAD: self._app.activate_link("play:"+str(userdata[1])) elif userdata[0] == NOTIFY_ENTRY: entry = userdata[1] #self._app.select_entry(entry['entry_id']) self._app.mark_entry_as_viewed(entry['entry_id'], entry['feed_id']) self._app.activate_link(entry['link']) def __quit_cb(self, data): self._app.do_quit() def 
__about_cb(self, data): self._app.main_window.on_about_activate(None) def __refresh_cb(self, data): self._app.poll_feeds() def __toggle_notifs_cb(self, toggleaction): show_notifs = toggleaction.get_active() self._app.db.set_setting(ptvDB.BOOL, '/apps/penguintv/show_notifications', show_notifs) self._show_notifications = show_notifs if show_notifs == False: self.clear_notifications() def _get_playpause_menuitems(self): playitem = self.manager.get_widget('/Menubar/Menu/Play') pauseitem = self.manager.get_widget('/Menubar/Menu/Pause') return playitem, pauseitem def __play_cb(self, obj): if self._app.player.using_internal_player(): self._app.player.control_internal("play") def __pause_cb(self, obj): if self._app.player.using_internal_player(): self._app.player.control_internal("pause") def __gst_player_show_cb(self, obj): if not self._player_showing: play, pause = self._get_playpause_menuitems() play.show() pause.hide() self._player_showing = True def __gst_player_hide_cb(self, obj): play, pause = self._get_playpause_menuitems() play.hide() pause.hide() self._player_showing = False def __gst_playing_cb(self, obj): play, pause = self._get_playpause_menuitems() play.hide() pause.show() def __gst_paused_cb(self, obj): play, pause = self._get_playpause_menuitems() play.show() pause.hide() PenguinTV-4.2.0/penguintv/MediaManager.py0000644000000000000000000003545611371560642015207 0ustar # Written by Owen Williams # see LICENSE for license information import ptvDB from types import * import ThreadPool import time import os,os.path import copy import logging import shutil import re import Downloader #import BTDownloader loaded when needed #import HTTPDownloader loaded when needed from utils import format_size import utils if utils.RUNNING_HILDON: HAS_GNOME = False else: try: import gnome HAS_GNOME = True except: HAS_GNOME = False from penguintv import DOWNLOAD_ERROR, DOWNLOAD_PROGRESS, DOWNLOAD_WARNING, DOWNLOAD_QUEUED from Downloader import QUEUED, DOWNLOADING, FINISHED, FINISHED_AND_PLAY, STOPPED, FAILURE, PAUSED RUNNING = 0 PAUSING = 1 PAUSED = 2 BYDATE = 0 BYNAME = 1 #Downloader API: #constructor takes: media, params, resume, queue, progress_callback, finished_callback # media: the media dic # params: optional params, like for btdownloader # resume: are we supposed to resume? # queue: are we supposed queue for playback when download is finished? this variable is just passed around # progress_callback: function to call for progress update. # arg of this is: (media, progress as 0 < x < 1, and text formatted message of progress) # the callback will return 1 if we should cancel download # finished_callback: function to call when finished. # args is: (media, status, message) # where status is the enum above class MediaManager: def __init__(self, app, media_dir, progress_callback=None, finished_callback=None): self.index=0 #should this be lucene compatible? 
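        # Hildon (Maemo) handhelds get a single download slot; everywhere else
        # up to five simultaneous downloads share the ThreadPool created below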
if utils.RUNNING_HILDON: max_downloads = 1 else: max_downloads = 5 self._style=BYDATE self.pool = ThreadPool.ThreadPool(max_downloads, "MediaManager") self.downloads = [] self.db = app.db self.time_appendix=0 self.bt_settings = {'min_port':6881, 'max_port':6999, 'ul_limit':0} self.id_time=0 self.quitting = False self._net_connected = True self.pause_state = RUNNING if finished_callback: self.app_callback_finished = finished_callback else: self.app_callback_finished = self._basic_finished_callback if progress_callback: self.app_callback_progress = progress_callback else: self.app_callback_progress = self._basic_progress_callback home=self.db.home if media_dir[0] == '~': media_dir = os.getenv('HOME') + media_dir[1:] try: os.stat(media_dir) except: try: os.mkdir(media_dir) except: raise NoDir, "error creating " +media_dir self._media_dir = media_dir app.connect('online-status-changed', self.__online_status_changed) app.connect('feed-name-changed',self.__feed_name_changed_cb) app.connect('new-database', self.__new_database_cb) def finish(self): self.quitting = True try: self.pool.joinAll() del self.pool except: pass def __del__(self): self.finish() def set_media_dir(self, new_dir): """sets new media dir. returns None, None on success, and returns new dir name if db and player need to be remapped to new dirs""" old_dir = self._media_dir if new_dir == old_dir: return None, None std_loc = os.path.join(utils.get_home(), 'media') #stat new folder if not os.access(new_dir, os.F_OK & os.R_OK & os.W_OK & os.X_OK): raise NoDir, "insufficient permissions to access %s" % new_dir try: os.symlink HAVE_SYMLINK = True except: HAVE_SYMLINK = False if HAVE_SYMLINK: if old_dir == std_loc: self._move_contents(std_loc, new_dir) self._media_dir = new_dir if os.path.islink(std_loc): os.remove(std_loc) os.symlink(new_dir, std_loc) else: os.rmdir(std_loc) os.symlink(new_dir, std_loc) return old_dir, std_loc elif new_dir == std_loc: self._media_dir = std_loc if os.path.islink(std_loc): os.remove(std_loc) self._move_contents(old_dir, std_loc) else: os.rmdir(std_loc) os.mkdir(std_loc) self._move_contents(old_dir, std_loc) return old_dir, std_loc else: self._move_contents(old_dir, new_dir) self._media_dir = new_dir if os.path.islink(std_loc): os.remove(std_loc) os.symlink(new_dir, std_loc) else: os.rmdir(std_loc) os.symlink(new_dir, std_loc) return old_dir, std_loc else: self._move_contents(old_dir, new_dir) self._media_dir = new_dir return old_dir, new_dir return None, None def _move_contents(self, src, dst): p = re.compile("\d{4}-\d{2}-\d{2}$") for f in os.listdir(src): if p.search(f) is not None or f.upper().endswith('M3U'): shutil.move(os.path.join(src, f), os.path.join(dst, f)) def get_media_dir(self): return self._media_dir def __online_status_changed(self, app, connected): if not connected: app.pause_downloads() else: if not self._net_connected: self.unpause_downloads() app.resume_resumable() self._net_connected = connected def __feed_name_changed_cb(self, app, feed_id, oldname, name): oldname = utils.make_pathsafe(oldname) name = utils.make_pathsafe(name) old_dir = os.path.join(self._media_dir, oldname) new_dir = os.path.join(self._media_dir, name) if os.path.isdir(old_dir): os.rename(old_dir, new_dir) def __new_database_cb(self, app, db): self.db = db def set_bt_settings(self, bt_settings): self.bt_settings = bt_settings def get_id(self): cur_time = int(time.time()) if self.id_time == cur_time: self.time_appendix = self.time_appendix+1 else: self.id_time = cur_time self.time_appendix=0 return 
str(self.id_time)+"+"+str(self.time_appendix) def show_downloads(self): if self._style==BYDATE: url = "file://"+self._media_dir+"/"+utils.get_dated_dir() else: url = "file://"+self._media_dir if HAS_GNOME: gnome.url_show(url) else: import webbrowser webbrowser.open_new_tab(url) def download_entry(self, entry_id, queue=False, resume=False): """queues a download will interact with bittorrent python use btlaunchmany code to write our own downloader just need to change init funcs, hijack status funcs, add cancelling""" media_list = self.db.get_entry_media(entry_id) if len(media_list)==0: return for media in media_list: self.download(media['media_id'], queue, resume) def download(self, media_id, queue=False, resume=False): """queues a download""" for downloader in self.downloads: if downloader.media['media_id'] == media_id: self.downloads.remove(downloader) break media = self.db.get_media(media_id) media['feedname'] = self.db.get_feed_title(media['feed_id']) media['downloader_index']=self.index media['download_status']=1 media.setdefault('size',0) if media['file'] is None: #logging.debug("TEMP OVERRIDE OF FILENAME?") #if True: filename = os.path.basename(media['url']) filen, ext = os.path.splitext(filename) ext = ext.split('?')[0] #grrr lugradio... #media['file']=os.path.join(self._media_dir, utils.get_dated_dir(), filen+ext) media['file']=self.get_storage_dir(media, filen+ext) dated_dir = os.path.split(os.path.split(media['file'])[0])[1] try: #make sure os.stat(os.path.join(self._media_dir, dated_dir)) except: os.mkdir(os.path.join(self._media_dir, dated_dir)) if self.db.media_exists(media['file']): #if the filename is in the db, rename #media['file']=os.path.join(self._media_dir, utils.get_dated_dir(), filen+"-"+self.get_id()+ext) media['file']=self.get_storage_dir(media, filen+"-"+self.get_id()+ext) else: try: os.stat(media['file']) #if this raises exception, the file doesn't exist and we're ok #media['file']=os.path.join(self._media_dir, utils.get_dated_dir(), filen+"-"+self.get_id()+ext) #if not, get new name media['file']=self.get_storage_dir(media, filen+"-"+self.get_id()+ext) #if not, get new name except: pass #we're ok if not resume: self.db.delete_media(media_id) #else: # print "resuming using existing filename: "+str(media['file']) extension = os.path.splitext(media['url'])[1] if media['mimetype'] == 'application/x-bittorrent' or extension.upper()==".TORRENT": params = [ '--minport', str(self.bt_settings['min_port']), '--maxport', str(self.bt_settings['max_port']), '--max_upload_rate', str(self.bt_settings['ul_limit'])] import BTDownloader downloader = BTDownloader.BTDownloader(media, self._media_dir, params,True, queue, self.callback_progress,self.callback_finished) self.downloads.append(downloader) self.pool.queueTask(downloader.download) else: #http regular download ext = os.path.splitext(media['file'])[1] if len(ext)>5 or len(ext)==0: #I think this isn't really the right extension. 
See fucking ask a ninja: http://feeds.feedburner.com/AskANinja try: import mimetypes real_ext = mimetypes.guess_extension(media['mimetype']) if real_ext is not None: media['file']=media['file']+real_ext except: print "ERROR couldn't guess mimetype, leaving filename alone" import HTTPDownloader downloader = HTTPDownloader.HTTPDownloader(media, self._media_dir, None, resume, queue, self.callback_progress, self.callback_finished) self.downloads.append(downloader) self.pool.queueTask(downloader.download) #self.db.set_media_download_status(media['media_id'],1) #self.db.set_media_filename(media['media_id'],media['file']) self.db.set_media(media['media_id'], status=1, filename=media['file']) self.index=self.index+1 def has_downloader(self, media_id): for download in self.downloads: if download.media['media_id'] == media_id: return True return False def get_downloader(self, media_id): for download in self.downloads: if download.media['media_id'] == media_id: return download raise DownloadNotFound, media_id def get_download_list(self, status=None): list = [] if status is not None: list = [d for d in self.downloads if d.status == status] else: list = copy.copy(self.downloads) return list def _basic_finished_callback(self, data): print data self.db.set_media_download_status(data[0]['media_id'],ptvDB.D_DOWNLOADED) def _basic_progress_callback(self, data): print os.path.split(data[0]['file'])[1]+" "+data[2] def callback_progress(self, obj): #print "mediamanager progress" return self.app_callback_progress(obj) def callback_finished(self, obj): if obj.status in [STOPPED, FINISHED, FINISHED_AND_PLAY, FAILURE]: try: self.downloads.remove(obj) except: print "Warning: couldn't remove download", obj.media self.update_playlist(obj.media) #if self.pause_state == RUNNING: self.app_callback_finished(obj) def get_download_count(self): try: #return self.pool.getTaskCount() return len(self.downloads) except: return 0 def stop_all_downloads(self): #try: if self.pause_state == RUNNING: for download in self.downloads: download.stop() #if not download.status == Downloader.DOWNLOADING: #send signal for all queued downloads # self.finished_callback(download, (download.media,MediaManager.STOPPED,None)) try: self.pool.joinAll(False,True) #don't wait for tasks, but let the threads die naturally except AttributeError: logging.warning("no pool to delete, no problem") #reset self.downloads = [] self.pause_state = PAUSED #except: # pass def pause_all_downloads(self): if self.pause_state == RUNNING: for download in self.downloads: download.pause() self.pool.joinAll(False,True) #don't wait for tasks, but let the threads die naturally self.pause_state = PAUSED def unpause_downloads(self): """DOES NOT requeue downloads. 
Just clears the state""" self.pause_state = RUNNING def stop_download(self, media_id): if self.has_downloader(media_id): downloader = self.get_downloader(media_id) if downloader.status == QUEUED: #if it's queued, we can stop it directly #the threadpool will still hold on to the object, but #when it tries to run it will see that it has been stopped downloader.stop() self.update_playlist(downloader.media) self.app_callback_finished(downloader) self.downloads.remove(downloader) else: downloader.stop() def get_disk_usage(self): size = 0 if utils.RUNNING_HILDON: #this is much faster on maemo, which sucks at mmc disk access import subprocess if not os.path.isdir(self._media_dir): return 0 cmd = "du -sk %s" % self._media_dir p = subprocess.Popen(cmd, shell=True, close_fds=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) retval = p.wait() stderr = p.stderr.read() if len(stderr) > 1 or retval != 0: return 0 retval = p.stdout.read().split('\t')[0] size = long(retval)*1024L else: try: #filelist = glob.glob(self._media_dir+"/*") for f in utils.GlobDirectoryWalker(self._media_dir): size = size+os.stat(f)[6] except: pass return size def generate_playlist(self): if utils.RUNNING_SUGAR: return if self._style == BYDATE: import glob dated_dir = utils.get_dated_dir() try: os.stat(os.path.join(self._media_dir, dated_dir)) except: os.mkdir(os.path.join(self._media_dir, dated_dir)) f = open(os.path.join(self._media_dir, dated_dir, "playlist.m3u"),'w') f.write('#EXTM3U\n') for item in glob.glob(os.path.join(self._media_dir, dated_dir, "*")): filename = os.path.split(item)[1] if filename != "playlist.m3u": f.write(filename+"\n") f.close() def update_playlist(self, media): """Adds media to the playlist in its directory""" if utils.RUNNING_SUGAR: return try: os.stat(media['file']) except: return dated_dir = os.path.split(os.path.split(media['file'])[0])[1] try: os.stat(os.path.join(self._media_dir, dated_dir, "playlist.m3u")) f = open(os.path.join(self._media_dir, dated_dir, "playlist.m3u"),'a') except: f = open(os.path.join(self._media_dir, dated_dir, "playlist.m3u"),'w') f.write('#EXTM3U\n') f.write(os.path.split(media['file'])[1]+"\n") f.close() def set_storage_style(self, style, migrate=False): if self._style == style: return self._style = style if migrate: #migrate the media from one style to the other if self._style == BYDATE: self.db.set_media_storage_style_dated(self._media_dir) else: self.db.set_media_storage_style_named(self._media_dir) def get_storage_style(self): return self._style def get_storage_dir(self, media, filename): if self._style == BYDATE: return os.path.join(self._media_dir, utils.get_dated_dir(), filename) elif self._style == BYNAME: return os.path.join(self._media_dir, utils.make_pathsafe(media['feedname']), filename) else: logging.error("Bad storage style (not 0 or 1): %i" % self._style) assert False class NoDir(Exception): def __init__(self,durr): self.durr = durr def __str__(self): return "no such directory: "+self.durr class DownloadNotFound(Exception): def __init__(self,durr): self.durr = durr def __str__(self): return "download not found: "+str(self.durr) PenguinTV-4.2.0/penguintv/feedparser.py0000755000000000000000000036176111120277531015013 0ustar #!/usr/bin/env python """Universal feed parser Handles RSS 0.9x, RSS 1.0, RSS 2.0, CDF, Atom 0.3, and Atom 1.0 feeds Visit http://feedparser.org/ for the latest version Visit http://feedparser.org/docs/ for the latest documentation Required: Python 2.1 or later Recommended: Python 2.3 or later Recommended: CJKCodecs and iconv_codec 
""" __version__ = "4.1"# + "$Revision: 1.2 $"[11:15] + "-cvs" __license__ = """Copyright (c) 2002-2006, Mark Pilgrim, All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 'AS IS' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.""" __author__ = "Mark Pilgrim " __contributors__ = ["Jason Diamond ", "John Beimler ", "Fazal Majid ", "Aaron Swartz ", "Kevin Marks "] _debug = 0 # HTTP "User-Agent" header to send to servers when downloading feeds. # If you are embedding feedparser in a larger application, you should # change this to your application name and URL. #USER_AGENT = "UniversalFeedParser/%s +http://feedparser.org/" % __version__ try: import utils USER_AGENT = "PenguinTV/%s +http://penguintv.sourceforge.net/" % utils.VERSION except: USER_AGENT = "PenguinTV +http://penguintv.sourceforge.net/" import gettext _ = gettext.gettext # HTTP "Accept" header to send to servers when downloading feeds. If you don't # want to send an Accept header, set this to None. #ACCEPT_HEADER = "application/atom+xml,application/rdf+xml,application/rss+xml,application/x-netcdf,application/xml;q=0.9,text/xml;q=0.2,*/*;q=0.1" #OWEN EDIT: #current.com and other sites don't like this, just turn it off ACCEPT_HEADER = None # List of preferred XML parsers, by SAX driver name. These will be tried first, # but if they're not installed, Python will keep searching through its own list # of pre-installed parsers until it finds one that supports everything we need. PREFERRED_XML_PARSERS = ["drv_libxml2"] # If you want feedparser to automatically run HTML markup through HTML Tidy, set # this to 1. Requires mxTidy # or utidylib . TIDY_MARKUP = 0 # List of Python interfaces for HTML Tidy, in order of preference. Only useful # if TIDY_MARKUP = 1 PREFERRED_TIDY_INTERFACES = ["uTidy", "mxTidy"] # ---------- required modules (should come with any Python distribution) ---------- import sgmllib, re, sys, copy, urlparse, time, rfc822, types, cgi, urllib, urllib2 try: from cStringIO import StringIO as _StringIO except: from StringIO import StringIO as _StringIO # ---------- optional modules (feedparser will work without these, but with reduced functionality) ---------- # gzip is included with most Python distributions, but may not be available if you compiled your own try: import gzip except: gzip = None try: import zlib except: zlib = None # If a real XML parser is available, feedparser will attempt to use it. 
feedparser has # been tested with the built-in SAX parser, PyXML, and libxml2. On platforms where the # Python distribution does not come with an XML parser (such as Mac OS X 10.2 and some # versions of FreeBSD), feedparser will quietly fall back on regex-based parsing. try: import xml.sax xml.sax.make_parser(PREFERRED_XML_PARSERS) # test for valid parsers from xml.sax.saxutils import escape as _xmlescape _XML_AVAILABLE = 1 except: _XML_AVAILABLE = 0 def _xmlescape(data): data = data.replace('&', '&') data = data.replace('>', '>') data = data.replace('<', '<') return data # base64 support for Atom feeds that contain embedded binary data try: import base64, binascii except: base64 = binascii = None # cjkcodecs and iconv_codec provide support for more character encodings. # Both are available from http://cjkpython.i18n.org/ try: import cjkcodecs.aliases except: pass try: import iconv_codec except: pass # chardet library auto-detects character encodings # Download from http://chardet.feedparser.org/ try: import chardet if _debug: import chardet.constants chardet.constants._debug = 1 except: chardet = None # ---------- don't touch these ---------- class ThingsNobodyCaresAboutButMe(Exception): pass class CharacterEncodingOverride(ThingsNobodyCaresAboutButMe): pass class CharacterEncodingUnknown(ThingsNobodyCaresAboutButMe): pass class NonXMLContentType(ThingsNobodyCaresAboutButMe): pass class UndeclaredNamespace(Exception): pass sgmllib.tagfind = re.compile('[a-zA-Z][-_.:a-zA-Z0-9]*') sgmllib.special = re.compile('' % (tag, ''.join([' %s="%s"' % t for t in attrs])), escape=0) # match namespaces if tag.find(':') <> -1: prefix, suffix = tag.split(':', 1) else: prefix, suffix = '', tag prefix = self.namespacemap.get(prefix, prefix) if prefix: prefix = prefix + '_' # special hack for better tracking of empty textinput/image elements in illformed feeds if (not prefix) and tag not in ('title', 'link', 'description', 'name'): self.intextinput = 0 if (not prefix) and tag not in ('title', 'link', 'description', 'url', 'href', 'width', 'height'): self.inimage = 0 # call special handler (if defined) or default handler methodname = '_start_' + prefix + suffix try: method = getattr(self, methodname) return method(attrsD) except AttributeError: return self.push(prefix + suffix, 1) def unknown_endtag(self, tag): if _debug: sys.stderr.write('end %s\n' % tag) # match namespaces if tag.find(':') <> -1: prefix, suffix = tag.split(':', 1) else: prefix, suffix = '', tag prefix = self.namespacemap.get(prefix, prefix) if prefix: prefix = prefix + '_' # call special handler (if defined) or default handler methodname = '_end_' + prefix + suffix try: method = getattr(self, methodname) method() except AttributeError: self.pop(prefix + suffix) # track inline content if self.incontent and self.contentparams.has_key('type') and not self.contentparams.get('type', 'xml').endswith('xml'): # element declared itself as escaped markup, but it isn't really self.contentparams['type'] = 'application/xhtml+xml' if self.incontent and self.contentparams.get('type') == 'application/xhtml+xml': tag = tag.split(':')[-1] self.handle_data('' % tag, escape=0) # track xml:base and xml:lang going out of scope if self.basestack: self.basestack.pop() if self.basestack and self.basestack[-1]: self.baseuri = self.basestack[-1] if self.langstack: self.langstack.pop() if self.langstack: # and (self.langstack[-1] is not None): self.lang = self.langstack[-1] def handle_charref(self, ref): # called for each character reference, e.g. 
for ' ', ref will be '160' if not self.elementstack: return ref = ref.lower() if ref in ('34', '38', '39', '60', '62', 'x22', 'x26', 'x27', 'x3c', 'x3e'): text = '&#%s;' % ref else: if ref[0] == 'x': c = int(ref[1:], 16) else: c = int(ref) text = unichr(c).encode('utf-8') self.elementstack[-1][2].append(text) def handle_entityref(self, ref): # called for each entity reference, e.g. for '©', ref will be 'copy' if not self.elementstack: return if _debug: sys.stderr.write('entering handle_entityref with %s\n' % ref) if ref in ('lt', 'gt', 'quot', 'amp', 'apos'): text = '&%s;' % ref else: # entity resolution graciously donated by Aaron Swartz def name2cp(k): import htmlentitydefs if hasattr(htmlentitydefs, 'name2codepoint'): # requires Python 2.3 return htmlentitydefs.name2codepoint[k] k = htmlentitydefs.entitydefs[k] if k.startswith('&#') and k.endswith(';'): return int(k[2:-1]) # not in latin-1 return ord(k) try: name2cp(ref) except KeyError: text = '&%s;' % ref else: text = unichr(name2cp(ref)).encode('utf-8') self.elementstack[-1][2].append(text) def handle_data(self, text, escape=1): # called for each block of plain text, i.e. outside of any tag and # not containing any character or entity references if not self.elementstack: return if escape and self.contentparams.get('type') == 'application/xhtml+xml': text = _xmlescape(text) self.elementstack[-1][2].append(text) def handle_comment(self, text): # called for each comment, e.g. pass def handle_pi(self, text): # called for each processing instruction, e.g. pass def handle_decl(self, text): pass def parse_declaration(self, i): # override internal declaration handler to handle CDATA blocks if _debug: sys.stderr.write('entering parse_declaration\n') if self.rawdata[i:i+9] == '', i) if k == -1: k = len(self.rawdata) self.handle_data(_xmlescape(self.rawdata[i+9:k]), 0) return k+3 else: k = self.rawdata.find('>', i) return k+1 def mapContentType(self, contentType): contentType = contentType.lower() if contentType == 'text': contentType = 'text/plain' elif contentType == 'html': contentType = 'text/html' elif contentType == 'xhtml': contentType = 'application/xhtml+xml' return contentType def trackNamespace(self, prefix, uri): loweruri = uri.lower() if (prefix, loweruri) == (None, 'http://my.netscape.com/rdf/simple/0.9/') and not self.version: self.version = 'rss090' if loweruri == 'http://purl.org/rss/1.0/' and not self.version: self.version = 'rss10' if loweruri == 'http://www.w3.org/2005/atom' and not self.version: self.version = 'atom10' if loweruri.find('backend.userland.com/rss') <> -1: # match any backend.userland.com namespace uri = 'http://backend.userland.com/rss' loweruri = uri if self._matchnamespaces.has_key(loweruri): self.namespacemap[prefix] = self._matchnamespaces[loweruri] self.namespacesInUse[self._matchnamespaces[loweruri]] = uri else: self.namespacesInUse[prefix or ''] = uri def resolveURI(self, uri): return _urljoin(self.baseuri or '', uri) def decodeEntities(self, element, data): return data def push(self, element, expectingText): self.elementstack.append([element, expectingText, []]) def pop(self, element, stripWhitespace=1): if not self.elementstack: return if self.elementstack[-1][0] != element: return element, expectingText, pieces = self.elementstack.pop() output = ''.join(pieces) if stripWhitespace: output = output.strip() if not expectingText: return output # decode base64 content if base64 and self.contentparams.get('base64', 0): try: output = base64.decodestring(output) except binascii.Error: pass except 
binascii.Incomplete: pass # resolve relative URIs if (element in self.can_be_relative_uri) and output: output = self.resolveURI(output) # decode entities within embedded markup if not self.contentparams.get('base64', 0): output = self.decodeEntities(element, output) # remove temporary cruft from contentparams try: del self.contentparams['mode'] except KeyError: pass try: del self.contentparams['base64'] except KeyError: pass # resolve relative URIs within embedded markup if self.mapContentType(self.contentparams.get('type', 'text/html')) in self.html_types: if element in self.can_contain_relative_uris: output = _resolveRelativeURIs(output, self.baseuri, self.encoding) # sanitize embedded markup if self.mapContentType(self.contentparams.get('type', 'text/html')) in self.html_types: if element in self.can_contain_dangerous_markup: output = _sanitizeHTML(output, self.encoding) if self.encoding and type(output) != type(u''): try: output = unicode(output, self.encoding) except: pass # categories/tags/keywords/whatever are handled in _end_category if element == 'category': return output # store output in appropriate place(s) if self.inentry and not self.insource: if element == 'content': self.entries[-1].setdefault(element, []) contentparams = copy.deepcopy(self.contentparams) contentparams['value'] = output self.entries[-1][element].append(contentparams) elif element == 'link': self.entries[-1][element] = output if output: self.entries[-1]['links'][-1]['href'] = output else: if element == 'description': element = 'summary' self.entries[-1][element] = output if self.incontent: contentparams = copy.deepcopy(self.contentparams) contentparams['value'] = output self.entries[-1][element + '_detail'] = contentparams elif (self.infeed or self.insource) and (not self.intextinput) and (not self.inimage): context = self._getContext() if element == 'description': element = 'subtitle' context[element] = output if element == 'link': context['links'][-1]['href'] = output elif self.incontent: contentparams = copy.deepcopy(self.contentparams) contentparams['value'] = output context[element + '_detail'] = contentparams return output def pushContent(self, tag, attrsD, defaultContentType, expectingText): self.incontent += 1 self.contentparams = FeedParserDict({ 'type': self.mapContentType(attrsD.get('type', defaultContentType)), 'language': self.lang, 'base': self.baseuri}) self.contentparams['base64'] = self._isBase64(attrsD, self.contentparams) self.push(tag, expectingText) def popContent(self, tag): value = self.pop(tag) self.incontent -= 1 self.contentparams.clear() return value def _mapToStandardPrefix(self, name): colonpos = name.find(':') if colonpos <> -1: prefix = name[:colonpos] suffix = name[colonpos+1:] prefix = self.namespacemap.get(prefix, prefix) name = prefix + ':' + suffix return name def _getAttribute(self, attrsD, name): return attrsD.get(self._mapToStandardPrefix(name)) def _isBase64(self, attrsD, contentparams): if attrsD.get('mode', '') == 'base64': return 1 if self.contentparams['type'].startswith('text/'): return 0 if self.contentparams['type'].endswith('+xml'): return 0 if self.contentparams['type'].endswith('/xml'): return 0 return 1 def _itsAnHrefDamnIt(self, attrsD): href = attrsD.get('url', attrsD.get('uri', attrsD.get('href', None))) if href: try: del attrsD['url'] except KeyError: pass try: del attrsD['uri'] except KeyError: pass attrsD['href'] = href return attrsD def _save(self, key, value): context = self._getContext() context.setdefault(key, value) def _start_rss(self, attrsD): 
versionmap = {'0.91': 'rss091u', '0.92': 'rss092', '0.93': 'rss093', '0.94': 'rss094'} if not self.version: attr_version = attrsD.get('version', '') version = versionmap.get(attr_version) if version: self.version = version elif attr_version.startswith('2.'): self.version = 'rss20' else: self.version = 'rss' def _start_dlhottitles(self, attrsD): self.version = 'hotrss' def _start_channel(self, attrsD): self.infeed = 1 self._cdf_common(attrsD) _start_feedinfo = _start_channel def _cdf_common(self, attrsD): if attrsD.has_key('lastmod'): self._start_modified({}) self.elementstack[-1][-1] = attrsD['lastmod'] self._end_modified() if attrsD.has_key('href'): self._start_link({}) self.elementstack[-1][-1] = attrsD['href'] self._end_link() def _start_feed(self, attrsD): self.infeed = 1 versionmap = {'0.1': 'atom01', '0.2': 'atom02', '0.3': 'atom03'} if not self.version: attr_version = attrsD.get('version') version = versionmap.get(attr_version) if version: self.version = version else: self.version = 'atom' def _end_channel(self): self.infeed = 0 _end_feed = _end_channel def _start_image(self, attrsD): self.inimage = 1 self.push('image', 0) context = self._getContext() context.setdefault('image', FeedParserDict()) def _end_image(self): self.pop('image') self.inimage = 0 def _start_textinput(self, attrsD): self.intextinput = 1 self.push('textinput', 0) context = self._getContext() context.setdefault('textinput', FeedParserDict()) _start_textInput = _start_textinput def _end_textinput(self): self.pop('textinput') self.intextinput = 0 _end_textInput = _end_textinput def _start_author(self, attrsD): self.inauthor = 1 self.push('author', 1) _start_managingeditor = _start_author _start_dc_author = _start_author _start_dc_creator = _start_author _start_itunes_author = _start_author def _end_author(self): self.pop('author') self.inauthor = 0 self._sync_author_detail() _end_managingeditor = _end_author _end_dc_author = _end_author _end_dc_creator = _end_author _end_itunes_author = _end_author def _start_itunes_owner(self, attrsD): self.inpublisher = 1 self.push('publisher', 0) def _end_itunes_owner(self): self.pop('publisher') self.inpublisher = 0 self._sync_author_detail('publisher') def _start_contributor(self, attrsD): self.incontributor = 1 context = self._getContext() context.setdefault('contributors', []) context['contributors'].append(FeedParserDict()) self.push('contributor', 0) def _end_contributor(self): self.pop('contributor') self.incontributor = 0 def _start_dc_contributor(self, attrsD): self.incontributor = 1 context = self._getContext() context.setdefault('contributors', []) context['contributors'].append(FeedParserDict()) self.push('name', 0) def _end_dc_contributor(self): self._end_name() self.incontributor = 0 def _start_name(self, attrsD): self.push('name', 0) _start_itunes_name = _start_name def _end_name(self): value = self.pop('name') if self.inpublisher: self._save_author('name', value, 'publisher') elif self.inauthor: self._save_author('name', value) elif self.incontributor: self._save_contributor('name', value) elif self.intextinput: context = self._getContext() context['textinput']['name'] = value _end_itunes_name = _end_name def _start_width(self, attrsD): self.push('width', 0) def _end_width(self): value = self.pop('width') try: value = int(value) except: value = 0 if self.inimage: context = self._getContext() context['image']['width'] = value def _start_height(self, attrsD): self.push('height', 0) def _end_height(self): value = self.pop('height') try: value = int(value) except: 
value = 0 if self.inimage: context = self._getContext() context['image']['height'] = value def _start_url(self, attrsD): self.push('href', 1) _start_homepage = _start_url _start_uri = _start_url def _end_url(self): value = self.pop('href') if self.inauthor: self._save_author('href', value) elif self.incontributor: self._save_contributor('href', value) elif self.inimage: context = self._getContext() context['image']['href'] = value elif self.intextinput: context = self._getContext() context['textinput']['link'] = value _end_homepage = _end_url _end_uri = _end_url def _start_email(self, attrsD): self.push('email', 0) _start_itunes_email = _start_email def _end_email(self): value = self.pop('email') if self.inpublisher: self._save_author('email', value, 'publisher') elif self.inauthor: self._save_author('email', value) elif self.incontributor: self._save_contributor('email', value) _end_itunes_email = _end_email def _getContext(self): if self.insource: context = self.sourcedata elif self.inentry: context = self.entries[-1] else: context = self.feeddata return context def _save_author(self, key, value, prefix='author'): context = self._getContext() context.setdefault(prefix + '_detail', FeedParserDict()) context[prefix + '_detail'][key] = value self._sync_author_detail() def _save_contributor(self, key, value): context = self._getContext() context.setdefault('contributors', [FeedParserDict()]) context['contributors'][-1][key] = value def _sync_author_detail(self, key='author'): context = self._getContext() detail = context.get('%s_detail' % key) if detail: name = detail.get('name') email = detail.get('email') if name and email: context[key] = '%s (%s)' % (name, email) elif name: context[key] = name elif email: context[key] = email else: author = context.get(key) if not author: return emailmatch = re.search(r'''(([a-zA-Z0-9\_\-\.\+]+)@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.)|(([a-zA-Z0-9\-]+\.)+))([a-zA-Z]{2,4}|[0-9]{1,3})(\]?))''', author) if not emailmatch: return email = emailmatch.group(0) # probably a better way to do the following, but it passes all the tests author = author.replace(email, '') author = author.replace('()', '') author = author.strip() if author and (author[0] == '('): author = author[1:] if author and (author[-1] == ')'): author = author[:-1] author = author.strip() context.setdefault('%s_detail' % key, FeedParserDict()) context['%s_detail' % key]['name'] = author context['%s_detail' % key]['email'] = email def _start_subtitle(self, attrsD): self.pushContent('subtitle', attrsD, 'text/plain', 1) _start_tagline = _start_subtitle _start_itunes_subtitle = _start_subtitle def _end_subtitle(self): self.popContent('subtitle') _end_tagline = _end_subtitle _end_itunes_subtitle = _end_subtitle def _start_rights(self, attrsD): self.pushContent('rights', attrsD, 'text/plain', 1) _start_dc_rights = _start_rights _start_copyright = _start_rights def _end_rights(self): self.popContent('rights') _end_dc_rights = _end_rights _end_copyright = _end_rights def _start_item(self, attrsD): self.entries.append(FeedParserDict()) self.push('item', 0) self.inentry = 1 self.guidislink = 0 id = self._getAttribute(attrsD, 'rdf:about') if id: context = self._getContext() context['id'] = id self._cdf_common(attrsD) _start_entry = _start_item _start_product = _start_item def _end_item(self): self.pop('item') self.inentry = 0 _end_entry = _end_item def _start_dc_language(self, attrsD): self.push('language', 1) _start_language = _start_dc_language def _end_dc_language(self): self.lang = 
self.pop('language') _end_language = _end_dc_language def _start_dc_publisher(self, attrsD): self.push('publisher', 1) _start_webmaster = _start_dc_publisher def _end_dc_publisher(self): self.pop('publisher') self._sync_author_detail('publisher') _end_webmaster = _end_dc_publisher def _start_published(self, attrsD): self.push('published', 1) _start_dcterms_issued = _start_published _start_issued = _start_published def _end_published(self): value = self.pop('published') self._save('published_parsed', _parse_date(value)) _end_dcterms_issued = _end_published _end_issued = _end_published def _start_updated(self, attrsD): self.push('updated', 1) _start_modified = _start_updated _start_dcterms_modified = _start_updated _start_pubdate = _start_updated _start_dc_date = _start_updated def _end_updated(self): value = self.pop('updated') parsed_value = _parse_date(value) self._save('updated_parsed', parsed_value) _end_modified = _end_updated _end_dcterms_modified = _end_updated _end_pubdate = _end_updated _end_dc_date = _end_updated def _start_created(self, attrsD): self.push('created', 1) _start_dcterms_created = _start_created def _end_created(self): value = self.pop('created') self._save('created_parsed', _parse_date(value)) _end_dcterms_created = _end_created def _start_expirationdate(self, attrsD): self.push('expired', 1) def _end_expirationdate(self): self._save('expired_parsed', _parse_date(self.pop('expired'))) def _start_cc_license(self, attrsD): self.push('license', 1) value = self._getAttribute(attrsD, 'rdf:resource') if value: self.elementstack[-1][2].append(value) self.pop('license') def _start_creativecommons_license(self, attrsD): self.push('license', 1) def _end_creativecommons_license(self): self.pop('license') def _addTag(self, term, scheme, label): context = self._getContext() tags = context.setdefault('tags', []) if (not term) and (not scheme) and (not label): return value = FeedParserDict({'term': term, 'scheme': scheme, 'label': label}) if value not in tags: tags.append(FeedParserDict({'term': term, 'scheme': scheme, 'label': label})) def _start_category(self, attrsD): if _debug: sys.stderr.write('entering _start_category with %s\n' % repr(attrsD)) term = attrsD.get('term') scheme = attrsD.get('scheme', attrsD.get('domain')) label = attrsD.get('label') self._addTag(term, scheme, label) self.push('category', 1) _start_dc_subject = _start_category _start_keywords = _start_category def _end_itunes_keywords(self): for term in self.pop('itunes_keywords').split(): self._addTag(term, 'http://www.itunes.com/', None) def _start_itunes_category(self, attrsD): self._addTag(attrsD.get('text'), 'http://www.itunes.com/', None) self.push('category', 1) def _end_category(self): value = self.pop('category') if not value: return context = self._getContext() tags = context['tags'] if value and len(tags) and not tags[-1]['term']: tags[-1]['term'] = value else: self._addTag(value, None, None) _end_dc_subject = _end_category _end_keywords = _end_category _end_itunes_category = _end_category def _start_cloud(self, attrsD): self._getContext()['cloud'] = FeedParserDict(attrsD) def _start_link(self, attrsD): attrsD.setdefault('rel', 'alternate') attrsD.setdefault('type', 'text/html') attrsD = self._itsAnHrefDamnIt(attrsD) if attrsD.has_key('href'): attrsD['href'] = self.resolveURI(attrsD['href']) expectingText = self.infeed or self.inentry or self.insource context = self._getContext() context.setdefault('links', []) context['links'].append(FeedParserDict(attrsD)) if attrsD['rel'] == 'enclosure': 
self._start_enclosure(attrsD) if attrsD.has_key('href'): expectingText = 0 if (attrsD.get('rel') == 'alternate') and (self.mapContentType(attrsD.get('type')) in self.html_types): context['link'] = attrsD['href'] else: self.push('link', expectingText) _start_producturl = _start_link def _end_link(self): value = self.pop('link') context = self._getContext() if self.intextinput: context['textinput']['link'] = value if self.inimage: context['image']['link'] = value _end_producturl = _end_link def _start_guid(self, attrsD): self.guidislink = (attrsD.get('ispermalink', 'true') == 'true') self.push('id', 1) def _end_guid(self): value = self.pop('id') self._save('guidislink', self.guidislink and not self._getContext().has_key('link')) if self.guidislink: # guid acts as link, but only if 'ispermalink' is not present or is 'true', # and only if the item doesn't already have a link element self._save('link', value) def _start_title(self, attrsD): self.pushContent('title', attrsD, 'text/plain', self.infeed or self.inentry or self.insource) def _start_title_low_pri(self, attrsD): if not self._getContext().has_key('title'): self._start_title(attrsD) _start_dc_title = _start_title_low_pri _start_media_title = _start_title_low_pri def _end_title(self): value = self.popContent('title') context = self._getContext() if self.intextinput: context['textinput']['title'] = value elif self.inimage: context['image']['title'] = value def _end_title_low_pri(self): if not self._getContext().has_key('title'): self._end_title() _end_dc_title = _end_title_low_pri _end_media_title = _end_title_low_pri def _start_description(self, attrsD): context = self._getContext() if context.has_key('summary'): self._summaryKey = 'content' self._start_content(attrsD) else: self.pushContent('description', attrsD, 'text/html', self.infeed or self.inentry or self.insource) def _start_abstract(self, attrsD): self.pushContent('description', attrsD, 'text/plain', self.infeed or self.inentry or self.insource) def _end_description(self): if self._summaryKey == 'content': self._end_content() else: value = self.popContent('description') context = self._getContext() if self.intextinput: context['textinput']['description'] = value elif self.inimage: context['image']['description'] = value self._summaryKey = None _end_abstract = _end_description def _start_info(self, attrsD): self.pushContent('info', attrsD, 'text/plain', 1) _start_feedburner_browserfriendly = _start_info def _end_info(self): self.popContent('info') _end_feedburner_browserfriendly = _end_info def _start_generator(self, attrsD): if attrsD: attrsD = self._itsAnHrefDamnIt(attrsD) if attrsD.has_key('href'): attrsD['href'] = self.resolveURI(attrsD['href']) self._getContext()['generator_detail'] = FeedParserDict(attrsD) self.push('generator', 1) def _end_generator(self): value = self.pop('generator') context = self._getContext() if context.has_key('generator_detail'): context['generator_detail']['name'] = value def _start_admin_generatoragent(self, attrsD): self.push('generator', 1) value = self._getAttribute(attrsD, 'rdf:resource') if value: self.elementstack[-1][2].append(value) self.pop('generator') self._getContext()['generator_detail'] = FeedParserDict({'href': value}) def _start_admin_errorreportsto(self, attrsD): self.push('errorreportsto', 1) value = self._getAttribute(attrsD, 'rdf:resource') if value: self.elementstack[-1][2].append(value) self.pop('errorreportsto') def _start_summary(self, attrsD): context = self._getContext() if context.has_key('summary'): self._summaryKey = 
'content' self._start_content(attrsD) else: self._summaryKey = 'summary' self.pushContent(self._summaryKey, attrsD, 'text/plain', 1) _start_itunes_summary = _start_summary def _end_summary(self): if self._summaryKey == 'content': self._end_content() else: self.popContent(self._summaryKey or 'summary') self._summaryKey = None _end_itunes_summary = _end_summary def _start_enclosure(self, attrsD): attrsD = self._itsAnHrefDamnIt(attrsD) self._getContext().setdefault('enclosures', []).append(FeedParserDict(attrsD)) href = attrsD.get('href') if href: context = self._getContext() if not context.get('id'): context['id'] = href def _start_source(self, attrsD): self.insource = 1 def _end_source(self): self.insource = 0 self._getContext()['source'] = copy.deepcopy(self.sourcedata) self.sourcedata.clear() def _start_content(self, attrsD): self.pushContent('content', attrsD, 'text/plain', 1) src = attrsD.get('src') if src: self.contentparams['src'] = src self.push('content', 1) def _start_prodlink(self, attrsD): self.pushContent('content', attrsD, 'text/html', 1) def _start_body(self, attrsD): self.pushContent('content', attrsD, 'application/xhtml+xml', 1) _start_xhtml_body = _start_body def _start_content_encoded(self, attrsD): self.pushContent('content', attrsD, 'text/html', 1) _start_fullitem = _start_content_encoded def _end_content(self): copyToDescription = self.mapContentType(self.contentparams.get('type')) in (['text/plain'] + self.html_types) value = self.popContent('content') if copyToDescription: self._save('description', value) _end_body = _end_content _end_xhtml_body = _end_content _end_content_encoded = _end_content _end_fullitem = _end_content _end_prodlink = _end_content def _start_itunes_image(self, attrsD): self.push('itunes_image', 0) self._getContext()['image'] = FeedParserDict({'href': attrsD.get('href')}) _start_itunes_link = _start_itunes_image def _end_itunes_block(self): value = self.pop('itunes_block', 0) self._getContext()['itunes_block'] = (value == 'yes') and 1 or 0 def _end_itunes_explicit(self): value = self.pop('itunes_explicit', 0) self._getContext()['itunes_explicit'] = (value == 'yes') and 1 or 0 if _XML_AVAILABLE: class _StrictFeedParser(_FeedParserMixin, xml.sax.handler.ContentHandler): def __init__(self, baseuri, baselang, encoding): if _debug: sys.stderr.write('trying StrictFeedParser\n') xml.sax.handler.ContentHandler.__init__(self) _FeedParserMixin.__init__(self, baseuri, baselang, encoding) self.bozo = 0 self.exc = None def startPrefixMapping(self, prefix, uri): self.trackNamespace(prefix, uri) def startElementNS(self, name, qname, attrs): namespace, localname = name lowernamespace = str(namespace or '').lower() if lowernamespace.find('backend.userland.com/rss') <> -1: # match any backend.userland.com namespace namespace = 'http://backend.userland.com/rss' lowernamespace = namespace if qname and qname.find(':') > 0: givenprefix = qname.split(':')[0] else: givenprefix = None prefix = self._matchnamespaces.get(lowernamespace, givenprefix) if givenprefix and (prefix == None or (prefix == '' and lowernamespace == '')) and not self.namespacesInUse.has_key(givenprefix): raise UndeclaredNamespace, "'%s' is not associated with a namespace" % givenprefix if prefix: localname = prefix + ':' + localname localname = str(localname).lower() if _debug: sys.stderr.write('startElementNS: qname = %s, namespace = %s, givenprefix = %s, prefix = %s, attrs = %s, localname = %s\n' % (qname, namespace, givenprefix, prefix, attrs.items(), localname)) # qname implementation is horribly 
broken in Python 2.1 (it # doesn't report any), and slightly broken in Python 2.2 (it # doesn't report the xml: namespace). So we match up namespaces # with a known list first, and then possibly override them with # the qnames the SAX parser gives us (if indeed it gives us any # at all). Thanks to MatejC for helping me test this and # tirelessly telling me that it didn't work yet. attrsD = {} for (namespace, attrlocalname), attrvalue in attrs._attrs.items(): lowernamespace = (namespace or '').lower() prefix = self._matchnamespaces.get(lowernamespace, '') if prefix: attrlocalname = prefix + ':' + attrlocalname attrsD[str(attrlocalname).lower()] = attrvalue for qname in attrs.getQNames(): attrsD[str(qname).lower()] = attrs.getValueByQName(qname) self.unknown_starttag(localname, attrsD.items()) def characters(self, text): self.handle_data(text) def endElementNS(self, name, qname): namespace, localname = name lowernamespace = str(namespace or '').lower() if qname and qname.find(':') > 0: givenprefix = qname.split(':')[0] else: givenprefix = '' prefix = self._matchnamespaces.get(lowernamespace, givenprefix) if prefix: localname = prefix + ':' + localname localname = str(localname).lower() self.unknown_endtag(localname) def error(self, exc): self.bozo = 1 self.exc = exc def fatalError(self, exc): self.error(exc) raise exc class _BaseHTMLProcessor(sgmllib.SGMLParser): elements_no_end_tag = ['area', 'base', 'basefont', 'br', 'col', 'frame', 'hr', 'img', 'input', 'isindex', 'link', 'meta', 'param'] def __init__(self, encoding): self.encoding = encoding if _debug: sys.stderr.write('entering BaseHTMLProcessor, encoding=%s\n' % self.encoding) sgmllib.SGMLParser.__init__(self) def reset(self): self.pieces = [] sgmllib.SGMLParser.reset(self) def _shorttag_replace(self, match): tag = match.group(1) if tag in self.elements_no_end_tag: return '<' + tag + ' />' else: return '<' + tag + '>' def feed(self, data): data = re.compile(r'', self._shorttag_replace, data) # bug [ 1399464 ] Bad regexp for _shorttag_replace data = re.sub(r'<([^<\s]+?)\s*/>', self._shorttag_replace, data) data = data.replace(''', "'") data = data.replace('"', '"') if self.encoding and type(data) == type(u''): data = data.encode(self.encoding) sgmllib.SGMLParser.feed(self, data) def normalize_attrs(self, attrs): # utility method to be called by descendants attrs = [(k.lower(), v) for k, v in attrs] attrs = [(k, k in ('rel', 'type') and v.lower() or v) for k, v in attrs] return attrs def unknown_starttag(self, tag, attrs): # called for each start tag # attrs is a list of (attr, value) tuples # e.g. for
    <pre class='screen'>, tag='pre', attrs=[('class', 'screen')]
            if _debug: sys.stderr.write('_BaseHTMLProcessor, unknown_starttag, tag=%s\n' % tag)
            uattrs = []
            # thanks to Kevin Marks for this breathtaking hack to deal with (valid) high-bit attribute values in UTF-8 feeds
            for key, value in attrs:
                if type(value) != type(u''):
                    value = unicode(value, self.encoding)
                uattrs.append((unicode(key, self.encoding), value))
            strattrs = u''.join([u' %s="%s"' % (key, value) for key, value in uattrs]).encode(self.encoding)
            if tag in self.elements_no_end_tag:
                self.pieces.append('<%(tag)s%(strattrs)s />' % locals())
            else:
                self.pieces.append('<%(tag)s%(strattrs)s>' % locals())
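        # Illustrative round-trip: because 'img' is in elements_no_end_tag,
        # feeding '<img src="a.png">' through this processor and calling
        # output() yields '<img src="a.png" />', while '<p class="x">' is
        # reproduced verbatim and closed later by unknown_endtag below.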
    
        def unknown_endtag(self, tag):
            # called for each end tag, e.g. for </pre>
    , tag will be 'pre' # Reconstruct the original end tag. if tag not in self.elements_no_end_tag: self.pieces.append("" % locals()) def handle_charref(self, ref): # called for each character reference, e.g. for ' ', ref will be '160' # Reconstruct the original character reference. self.pieces.append('&#%(ref)s;' % locals()) def handle_entityref(self, ref): # called for each entity reference, e.g. for '©', ref will be 'copy' # Reconstruct the original entity reference. self.pieces.append('&%(ref)s;' % locals()) def handle_data(self, text): # called for each block of plain text, i.e. outside of any tag and # not containing any character or entity references # Store the original text verbatim. if _debug: sys.stderr.write('_BaseHTMLProcessor, handle_text, text=%s\n' % text) self.pieces.append(text) def handle_comment(self, text): # called for each HTML comment, e.g. # Reconstruct the original comment. self.pieces.append('' % locals()) def handle_pi(self, text): # called for each processing instruction, e.g. # Reconstruct original processing instruction. self.pieces.append('' % locals()) def handle_decl(self, text): # called for the DOCTYPE, if present, e.g. # # Reconstruct original DOCTYPE self.pieces.append('' % locals()) _new_declname_match = re.compile(r'[a-zA-Z][-_.a-zA-Z0-9:]*\s*').match def _scan_name(self, i, declstartpos): rawdata = self.rawdata n = len(rawdata) if i == n: return None, -1 m = self._new_declname_match(rawdata, i) if m: s = m.group() name = s.strip() if (i + len(s)) == n: return None, -1 # end of buffer return name.lower(), m.end() else: self.handle_data(rawdata) # self.updatepos(declstartpos, i) return None, -1 def output(self): '''Return processed HTML as a single string''' return ''.join([str(p) for p in self.pieces]) class _LooseFeedParser(_FeedParserMixin, _BaseHTMLProcessor): def __init__(self, baseuri, baselang, encoding): sgmllib.SGMLParser.__init__(self) _FeedParserMixin.__init__(self, baseuri, baselang, encoding) def decodeEntities(self, element, data): data = data.replace('<', '<') data = data.replace('<', '<') data = data.replace('>', '>') data = data.replace('>', '>') data = data.replace('&', '&') data = data.replace('&', '&') data = data.replace('"', '"') data = data.replace('"', '"') data = data.replace(''', ''') data = data.replace(''', ''') if self.contentparams.has_key('type') and not self.contentparams.get('type', 'xml').endswith('xml'): data = data.replace('<', '<') data = data.replace('>', '>') data = data.replace('&', '&') data = data.replace('"', '"') data = data.replace(''', "'") return data class _RelativeURIResolver(_BaseHTMLProcessor): relative_uris = [('a', 'href'), ('applet', 'codebase'), ('area', 'href'), ('blockquote', 'cite'), ('body', 'background'), ('del', 'cite'), ('form', 'action'), ('frame', 'longdesc'), ('frame', 'src'), ('iframe', 'longdesc'), ('iframe', 'src'), ('head', 'profile'), ('img', 'longdesc'), ('img', 'src'), ('img', 'usemap'), ('input', 'src'), ('input', 'usemap'), ('ins', 'cite'), ('link', 'href'), ('object', 'classid'), ('object', 'codebase'), ('object', 'data'), ('object', 'usemap'), ('q', 'cite'), ('script', 'src')] def __init__(self, baseuri, encoding): _BaseHTMLProcessor.__init__(self, encoding) self.baseuri = baseuri def resolveURI(self, uri): return _urljoin(self.baseuri, uri) def unknown_starttag(self, tag, attrs): attrs = self.normalize_attrs(attrs) attrs = [(key, ((tag, key) in self.relative_uris) and self.resolveURI(value) or value) for key, value in attrs] _BaseHTMLProcessor.unknown_starttag(self, tag, 
attrs) def _resolveRelativeURIs(htmlSource, baseURI, encoding): if _debug: sys.stderr.write('entering _resolveRelativeURIs\n') p = _RelativeURIResolver(baseURI, encoding) p.feed(htmlSource) return p.output() class _HTMLSanitizer(_BaseHTMLProcessor): acceptable_elements = ['a', 'abbr', 'acronym', 'address', 'area', 'b', 'big', 'blockquote', 'br', 'button', 'caption', 'center', 'cite', 'code', 'col', 'colgroup', 'dd', 'del', 'dfn', 'dir', 'div', 'dl', 'dt', 'em', 'fieldset', 'font', 'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'hr', 'i', 'img', 'input', 'ins', 'kbd', 'label', 'legend', 'li', 'map', 'menu', 'ol', 'optgroup', 'option', 'p', 'pre', 'q', 's', 'samp', 'select', 'small', 'span', 'strike', 'strong', 'sub', 'sup', 'table', 'tbody', 'td', 'textarea', 'tfoot', 'th', 'thead', 'tr', 'tt', 'u', 'ul', 'var'] acceptable_attributes = ['abbr', 'accept', 'accept-charset', 'accesskey', 'action', 'align', 'alt', 'axis', 'border', 'cellpadding', 'cellspacing', 'char', 'charoff', 'charset', 'checked', 'cite', 'class', 'clear', 'cols', 'colspan', 'color', 'compact', 'coords', 'datetime', 'dir', 'disabled', 'enctype', 'for', 'frame', 'headers', 'height', 'href', 'hreflang', 'hspace', 'id', 'ismap', 'label', 'lang', 'longdesc', 'maxlength', 'media', 'method', 'multiple', 'name', 'nohref', 'noshade', 'nowrap', 'prompt', 'readonly', 'rel', 'rev', 'rows', 'rowspan', 'rules', 'scope', 'selected', 'shape', 'size', 'span', 'src', 'start', 'summary', 'tabindex', 'target', 'title', 'type', 'usemap', 'valign', 'value', 'vspace', 'width'] unacceptable_elements_with_end_tag = ['script', 'applet'] def reset(self): _BaseHTMLProcessor.reset(self) self.unacceptablestack = 0 def unknown_starttag(self, tag, attrs): if not tag in self.acceptable_elements: if tag in self.unacceptable_elements_with_end_tag: self.unacceptablestack += 1 if tag == 'embed': for key, value in attrs: if key == 'type' and value == 'application/x-shockwave-flash': if not self.unacceptablestack: _BaseHTMLProcessor.handle_data(self, _('<Note: There is a Flash applet in the original page for this entry.>')) return attrs = self.normalize_attrs(attrs) attrs = [(key, value) for key, value in attrs if key in self.acceptable_attributes] _BaseHTMLProcessor.unknown_starttag(self, tag, attrs) def unknown_endtag(self, tag): if not tag in self.acceptable_elements: if tag in self.unacceptable_elements_with_end_tag: self.unacceptablestack -= 1 return _BaseHTMLProcessor.unknown_endtag(self, tag) def handle_pi(self, text): pass def handle_decl(self, text): pass def handle_data(self, text): if not self.unacceptablestack: _BaseHTMLProcessor.handle_data(self, text) def _sanitizeHTML(htmlSource, encoding): p = _HTMLSanitizer(encoding) p.feed(htmlSource) data = p.output() if TIDY_MARKUP: # loop through list of preferred Tidy interfaces looking for one that's installed, # then set up a common _tidy function to wrap the interface-specific API. 
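# A hedged demonstration of _HTMLSanitizer above, wrapped in a function so
# it does not run at import time: script/applet bodies are dropped outright,
# non-whitelisted tags are stripped while their text is kept, and only
# attributes in acceptable_attributes survive.  Input and expected output
# are illustrative.
def _example_sanitize():
    html = '<p onclick="evil()">hi <script>alert(1)</script><b>there</b></p>'
    return _sanitizeHTML(html, 'utf-8')  # expected: '<p>hi <b>there</b></p>'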
_tidy = None for tidy_interface in PREFERRED_TIDY_INTERFACES: try: if tidy_interface == "uTidy": from tidy import parseString as _utidy def _tidy(data, **kwargs): return str(_utidy(data, **kwargs)) break elif tidy_interface == "mxTidy": from mx.Tidy import Tidy as _mxtidy def _tidy(data, **kwargs): nerrors, nwarnings, data, errordata = _mxtidy.tidy(data, **kwargs) return data break except: pass if _tidy: utf8 = type(data) == type(u'') if utf8: data = data.encode('utf-8') data = _tidy(data, output_xhtml=1, numeric_entities=1, wrap=0, char_encoding="utf8") if utf8: data = unicode(data, 'utf-8') if data.count(''): data = data.split('>', 1)[1] if data.count('= '2.3.3' assert base64 != None user, passw = base64.decodestring(req.headers['Authorization'].split(' ')[1]).split(':') realm = re.findall('realm="([^"]*)"', headers['WWW-Authenticate'])[0] self.add_password(realm, host, user, passw) retry = self.http_error_auth_reqed('www-authenticate', host, req, headers) self.reset_retry_count() return retry except: return self.http_error_default(req, fp, code, msg, headers) def _open_resource(url_file_stream_or_string, etag, modified, agent, referrer, handlers): """URL, filename, or string --> stream This function lets you define parsers that take any input source (URL, pathname to local or network file, or actual data as a string) and deal with it in a uniform manner. Returned object is guaranteed to have all the basic stdio read methods (read, readline, readlines). Just .close() the object when you're done with it. If the etag argument is supplied, it will be used as the value of an If-None-Match request header. If the modified argument is supplied, it must be a tuple of 9 integers as returned by gmtime() in the standard Python time module. This MUST be in GMT (Greenwich Mean Time). The formatted date/time will be used as the value of an If-Modified-Since request header. If the agent argument is supplied, it will be used as the value of a User-Agent request header. If the referrer argument is supplied, it will be used as the value of a Referer[sic] request header. If handlers is supplied, it is a list of handlers used to build a urllib2 opener. """ if hasattr(url_file_stream_or_string, 'read'): return url_file_stream_or_string if url_file_stream_or_string == '-': return sys.stdin if urlparse.urlparse(url_file_stream_or_string)[0] in ('http', 'https', 'ftp'): if not agent: agent = USER_AGENT # test for inline user:password for basic auth auth = None if base64: urltype, rest = urllib.splittype(url_file_stream_or_string) realhost, rest = urllib.splithost(rest) if realhost: user_passwd, realhost = urllib.splituser(realhost) if user_passwd: url_file_stream_or_string = '%s://%s%s' % (urltype, realhost, rest) auth = base64.encodestring(user_passwd).strip() # try to open with urllib2 (to use optional headers) request = urllib2.Request(url_file_stream_or_string) request.add_header('User-Agent', agent) if etag: request.add_header('If-None-Match', etag) if modified: # format into an RFC 1123-compliant timestamp. We can't use # time.strftime() since the %a and %b directives can be affected # by the current locale, but RFC 2616 states that dates must be # in English. 
            short_weekdays = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
            months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
            request.add_header('If-Modified-Since', '%s, %02d %s %04d %02d:%02d:%02d GMT' % (short_weekdays[modified[6]], modified[2], months[modified[1] - 1], modified[0], modified[3], modified[4], modified[5]))
        if referrer:
            request.add_header('Referer', referrer)
        if gzip and zlib:
            request.add_header('Accept-encoding', 'gzip, deflate')
        elif gzip:
            request.add_header('Accept-encoding', 'gzip')
        elif zlib:
            request.add_header('Accept-encoding', 'deflate')
        else:
            request.add_header('Accept-encoding', '')
        if auth:
            request.add_header('Authorization', 'Basic %s' % auth)
        if ACCEPT_HEADER:
            request.add_header('Accept', ACCEPT_HEADER)
        request.add_header('A-IM', 'feed') # RFC 3229 support
        opener = apply(urllib2.build_opener, tuple([_FeedURLHandler()] + handlers))
        opener.addheaders = [] # RMK - must clear so we only send our custom User-Agent
        try:
            return opener.open(request)
        finally:
            opener.close() # JohnD

    # try to open with native open function (if url_file_stream_or_string is a filename)
    try:
        return open(url_file_stream_or_string)
    except:
        pass

    # treat url_file_stream_or_string as string
    return _StringIO(str(url_file_stream_or_string))

_date_handlers = []
def registerDateHandler(func):
    '''Register a date handler function (takes string, returns 9-tuple date in GMT)'''
    _date_handlers.insert(0, func)

# ISO-8601 date parsing routines written by Fazal Majid.
# The ISO 8601 standard is very convoluted and irregular - a full ISO 8601
# parser is beyond the scope of feedparser and would be a worthwhile addition
# to the Python library.
# A single regular expression cannot parse ISO 8601 date formats into groups
# as the standard is highly irregular (for instance is 030104 2003-01-04 or
# 0301-04-01), so we use templates instead.
# Please note the order in templates is significant because we need a
# greedy match.
_iso8601_tmpl = ['YYYY-?MM-?DD', 'YYYY-MM', 'YYYY-?OOO',
                 'YY-?MM-?DD', 'YY-?OOO', 'YYYY',
                 '-YY-?MM', '-OOO', '-YY',
                 '--MM-?DD', '--MM',
                 '---DD',
                 'CC', '']
_iso8601_re = [
    tmpl.replace(
    'YYYY', r'(?P<year>\d{4})').replace(
    'YY', r'(?P<year>\d\d)').replace(
    'MM', r'(?P<month>[01]\d)').replace(
    'DD', r'(?P<day>[0123]\d)').replace(
    'OOO', r'(?P<ordinal>[0123]\d\d)').replace(
    'CC', r'(?P<century>\d\d$)')
    + r'(T?(?P<hour>\d{2}):(?P<minute>\d{2})'
    + r'(:(?P<second>\d{2}))?'
    + r'(?P<tz>[+-](?P<tzhour>\d{2})(:(?P<tzmin>\d{2}))?|Z)?)?'
    for tmpl in _iso8601_tmpl]
del tmpl
_iso8601_matches = [re.compile(regex).match for regex in _iso8601_re]
del regex
def _parse_date_iso8601(dateString):
    '''Parse a variety of ISO-8601-compatible formats like 20040105'''
    m = None
    for _iso8601_match in _iso8601_matches:
        m = _iso8601_match(dateString)
        if m: break
    if not m: return
    if m.span() == (0, 0): return
    params = m.groupdict()
    ordinal = params.get('ordinal', 0)
    if ordinal:
        ordinal = int(ordinal)
    else:
        ordinal = 0
    year = params.get('year', '--')
    if not year or year == '--':
        year = time.gmtime()[0]
    elif len(year) == 2:
        # ISO 8601 assumes current century, i.e. 93 -> 2093, NOT 1993
        year = 100 * int(time.gmtime()[0] / 100) + int(year)
    else:
        year = int(year)
    month = params.get('month', '-')
    if not month or month == '-':
        # ordinals are NOT normalized by mktime, we simulate them
        # by setting month=1, day=ordinal
        if ordinal:
            month = 1
        else:
            month = time.gmtime()[1]
    month = int(month)
    day = params.get('day', 0)
    if not day:
        # see above
        if ordinal:
            day = ordinal
        elif params.get('century', 0) or \
             params.get('year', 0) or params.get('month', 0):
            day = 1
        else:
            day = time.gmtime()[2]
    else:
        day = int(day)
    # special case of the century - is the first year of the 21st century
    # 2000 or 2001 ? The debate goes on...
    if 'century' in params.keys():
        year = (int(params['century']) - 1) * 100 + 1
    # in ISO 8601 most fields are optional
    for field in ['hour', 'minute', 'second', 'tzhour', 'tzmin']:
        if not params.get(field, None):
            params[field] = 0
    hour = int(params.get('hour', 0))
    minute = int(params.get('minute', 0))
    second = int(params.get('second', 0))
    # weekday is normalized by mktime(), we can ignore it
    weekday = 0
    # daylight savings is complex, but not needed for feedparser's purposes
    # as time zones, if specified, include mention of whether it is active
    # (e.g. PST vs. PDT, CET), and most implementations have DST bugs
    daylight_savings_flag = 0
    tm = [year, month, day, hour, minute, second, weekday, ordinal, daylight_savings_flag]
    # ISO 8601 time zone adjustments
    tz = params.get('tz')
    if tz and tz != 'Z':
        if tz[0] == '-':
            tm[3] += int(params.get('tzhour', 0))
            tm[4] += int(params.get('tzmin', 0))
        elif tz[0] == '+':
            tm[3] -= int(params.get('tzhour', 0))
            tm[4] -= int(params.get('tzmin', 0))
        else:
            return None
    # Python's time.mktime() is a wrapper around the ANSI C mktime(3c)
    # which is guaranteed to normalize d/m/y/h/m/s.
    # Many implementations have bugs, but we'll pretend they don't.
    return time.localtime(time.mktime(tm))
registerDateHandler(_parse_date_iso8601)

# 8-bit date handling routines written by ytrewq1.
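# (Illustrative sketch, not in the original source: callers can extend date
# parsing at runtime through the public registerDateHandler() hook above.  A
# handler takes a string and returns a 9-tuple in GMT, or None to decline.
# The compact format and the handler name below are hypothetical.)
#
#   def _parse_date_compact(dateString):
#       '''Parse dates like 20040105T120000Z'''
#       try:
#           return time.strptime(dateString, '%Y%m%dT%H%M%SZ')
#       except ValueError:
#           return None
#   registerDateHandler(_parse_date_compact)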
_korean_year = u'\ub144' # b3e2 in euc-kr _korean_month = u'\uc6d4' # bff9 in euc-kr _korean_day = u'\uc77c' # c0cf in euc-kr _korean_am = u'\uc624\uc804' # bfc0 c0fc in euc-kr _korean_pm = u'\uc624\ud6c4' # bfc0 c8c4 in euc-kr _korean_onblog_date_re = \ re.compile('(\d{4})%s\s+(\d{2})%s\s+(\d{2})%s\s+(\d{2}):(\d{2}):(\d{2})' % \ (_korean_year, _korean_month, _korean_day)) _korean_nate_date_re = \ re.compile(u'(\d{4})-(\d{2})-(\d{2})\s+(%s|%s)\s+(\d{,2}):(\d{,2}):(\d{,2})' % \ (_korean_am, _korean_pm)) def _parse_date_onblog(dateString): '''Parse a string according to the OnBlog 8-bit date format''' m = _korean_onblog_date_re.match(dateString) if not m: return w3dtfdate = '%(year)s-%(month)s-%(day)sT%(hour)s:%(minute)s:%(second)s%(zonediff)s' % \ {'year': m.group(1), 'month': m.group(2), 'day': m.group(3),\ 'hour': m.group(4), 'minute': m.group(5), 'second': m.group(6),\ 'zonediff': '+09:00'} if _debug: sys.stderr.write('OnBlog date parsed as: %s\n' % w3dtfdate) return _parse_date_w3dtf(w3dtfdate) registerDateHandler(_parse_date_onblog) def _parse_date_nate(dateString): '''Parse a string according to the Nate 8-bit date format''' m = _korean_nate_date_re.match(dateString) if not m: return hour = int(m.group(5)) ampm = m.group(4) if (ampm == _korean_pm): hour += 12 hour = str(hour) if len(hour) == 1: hour = '0' + hour w3dtfdate = '%(year)s-%(month)s-%(day)sT%(hour)s:%(minute)s:%(second)s%(zonediff)s' % \ {'year': m.group(1), 'month': m.group(2), 'day': m.group(3),\ 'hour': hour, 'minute': m.group(6), 'second': m.group(7),\ 'zonediff': '+09:00'} if _debug: sys.stderr.write('Nate date parsed as: %s\n' % w3dtfdate) return _parse_date_w3dtf(w3dtfdate) registerDateHandler(_parse_date_nate) _mssql_date_re = \ re.compile('(\d{4})-(\d{2})-(\d{2})\s+(\d{2}):(\d{2}):(\d{2})(\.\d+)?') def _parse_date_mssql(dateString): '''Parse a string according to the MS SQL date format''' m = _mssql_date_re.match(dateString) if not m: return w3dtfdate = '%(year)s-%(month)s-%(day)sT%(hour)s:%(minute)s:%(second)s%(zonediff)s' % \ {'year': m.group(1), 'month': m.group(2), 'day': m.group(3),\ 'hour': m.group(4), 'minute': m.group(5), 'second': m.group(6),\ 'zonediff': '+09:00'} if _debug: sys.stderr.write('MS SQL date parsed as: %s\n' % w3dtfdate) return _parse_date_w3dtf(w3dtfdate) registerDateHandler(_parse_date_mssql) # Unicode strings for Greek date strings _greek_months = \ { \ u'\u0399\u03b1\u03bd': u'Jan', # c9e1ed in iso-8859-7 u'\u03a6\u03b5\u03b2': u'Feb', # d6e5e2 in iso-8859-7 u'\u039c\u03ac\u03ce': u'Mar', # ccdcfe in iso-8859-7 u'\u039c\u03b1\u03ce': u'Mar', # cce1fe in iso-8859-7 u'\u0391\u03c0\u03c1': u'Apr', # c1f0f1 in iso-8859-7 u'\u039c\u03ac\u03b9': u'May', # ccdce9 in iso-8859-7 u'\u039c\u03b1\u03ca': u'May', # cce1fa in iso-8859-7 u'\u039c\u03b1\u03b9': u'May', # cce1e9 in iso-8859-7 u'\u0399\u03bf\u03cd\u03bd': u'Jun', # c9effded in iso-8859-7 u'\u0399\u03bf\u03bd': u'Jun', # c9efed in iso-8859-7 u'\u0399\u03bf\u03cd\u03bb': u'Jul', # c9effdeb in iso-8859-7 u'\u0399\u03bf\u03bb': u'Jul', # c9f9eb in iso-8859-7 u'\u0391\u03cd\u03b3': u'Aug', # c1fde3 in iso-8859-7 u'\u0391\u03c5\u03b3': u'Aug', # c1f5e3 in iso-8859-7 u'\u03a3\u03b5\u03c0': u'Sep', # d3e5f0 in iso-8859-7 u'\u039f\u03ba\u03c4': u'Oct', # cfeaf4 in iso-8859-7 u'\u039d\u03bf\u03ad': u'Nov', # cdefdd in iso-8859-7 u'\u039d\u03bf\u03b5': u'Nov', # cdefe5 in iso-8859-7 u'\u0394\u03b5\u03ba': u'Dec', # c4e5ea in iso-8859-7 } _greek_wdays = \ { \ u'\u039a\u03c5\u03c1': u'Sun', # caf5f1 in iso-8859-7 u'\u0394\u03b5\u03c5': u'Mon', # c4e5f5 
in iso-8859-7 u'\u03a4\u03c1\u03b9': u'Tue', # d4f1e9 in iso-8859-7 u'\u03a4\u03b5\u03c4': u'Wed', # d4e5f4 in iso-8859-7 u'\u03a0\u03b5\u03bc': u'Thu', # d0e5ec in iso-8859-7 u'\u03a0\u03b1\u03c1': u'Fri', # d0e1f1 in iso-8859-7 u'\u03a3\u03b1\u03b2': u'Sat', # d3e1e2 in iso-8859-7 } _greek_date_format_re = \ re.compile(u'([^,]+),\s+(\d{2})\s+([^\s]+)\s+(\d{4})\s+(\d{2}):(\d{2}):(\d{2})\s+([^\s]+)') def _parse_date_greek(dateString): '''Parse a string according to a Greek 8-bit date format.''' m = _greek_date_format_re.match(dateString) if not m: return try: wday = _greek_wdays[m.group(1)] month = _greek_months[m.group(3)] except: return rfc822date = '%(wday)s, %(day)s %(month)s %(year)s %(hour)s:%(minute)s:%(second)s %(zonediff)s' % \ {'wday': wday, 'day': m.group(2), 'month': month, 'year': m.group(4),\ 'hour': m.group(5), 'minute': m.group(6), 'second': m.group(7),\ 'zonediff': m.group(8)} if _debug: sys.stderr.write('Greek date parsed as: %s\n' % rfc822date) return _parse_date_rfc822(rfc822date) registerDateHandler(_parse_date_greek) # Unicode strings for Hungarian date strings _hungarian_months = \ { \ u'janu\u00e1r': u'01', # e1 in iso-8859-2 u'febru\u00e1ri': u'02', # e1 in iso-8859-2 u'm\u00e1rcius': u'03', # e1 in iso-8859-2 u'\u00e1prilis': u'04', # e1 in iso-8859-2 u'm\u00e1ujus': u'05', # e1 in iso-8859-2 u'j\u00fanius': u'06', # fa in iso-8859-2 u'j\u00falius': u'07', # fa in iso-8859-2 u'augusztus': u'08', u'szeptember': u'09', u'okt\u00f3ber': u'10', # f3 in iso-8859-2 u'november': u'11', u'december': u'12', } _hungarian_date_format_re = \ re.compile(u'(\d{4})-([^-]+)-(\d{,2})T(\d{,2}):(\d{2})((\+|-)(\d{,2}:\d{2}))') def _parse_date_hungarian(dateString): '''Parse a string according to a Hungarian 8-bit date format.''' m = _hungarian_date_format_re.match(dateString) if not m: return try: month = _hungarian_months[m.group(2)] day = m.group(3) if len(day) == 1: day = '0' + day hour = m.group(4) if len(hour) == 1: hour = '0' + hour except: return w3dtfdate = '%(year)s-%(month)s-%(day)sT%(hour)s:%(minute)s%(zonediff)s' % \ {'year': m.group(1), 'month': month, 'day': day,\ 'hour': hour, 'minute': m.group(5),\ 'zonediff': m.group(6)} if _debug: sys.stderr.write('Hungarian date parsed as: %s\n' % w3dtfdate) return _parse_date_w3dtf(w3dtfdate) registerDateHandler(_parse_date_hungarian) # W3DTF-style date parsing adapted from PyXML xml.utils.iso8601, written by # Drake and licensed under the Python license. 
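# (Illustrative note, not in the original source: W3DTF/ISO-8601 strings such
# as '2003-12-31T10:14:55-08:00' are handled by _parse_date_w3dtf below, which
# returns the equivalent 9-tuple in GMT, here 2003-12-31 18:14:55 UTC.)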
# Removed all range checking for month, day, hour, minute, and second,
# since mktime will normalize these later
def _parse_date_w3dtf(dateString):
    def __extract_date(m):
        year = int(m.group('year'))
        if year < 100:
            year = 100 * int(time.gmtime()[0] / 100) + int(year)
        if year < 1000:
            return 0, 0, 0
        julian = m.group('julian')
        if julian:
            julian = int(julian)
            month = julian / 30 + 1
            day = julian % 30 + 1
            jday = None
            while jday != julian:
                t = time.mktime((year, month, day, 0, 0, 0, 0, 0, 0))
                jday = time.gmtime(t)[-2]
                diff = abs(jday - julian)
                if jday > julian:
                    if diff < day:
                        day = day - diff
                    else:
                        month = month - 1
                        day = 31
                elif jday < julian:
                    if day + diff < 28:
                        day = day + diff
                    else:
                        month = month + 1
            return year, month, day
        month = m.group('month')
        day = 1
        if month is None:
            month = 1
        else:
            month = int(month)
            day = m.group('day')
            if day:
                day = int(day)
            else:
                day = 1
        return year, month, day

    def __extract_time(m):
        if not m:
            return 0, 0, 0
        hours = m.group('hours')
        if not hours:
            return 0, 0, 0
        hours = int(hours)
        minutes = int(m.group('minutes'))
        seconds = m.group('seconds')
        if seconds:
            seconds = int(seconds)
        else:
            seconds = 0
        return hours, minutes, seconds

    def __extract_tzd(m):
        '''Return the Time Zone Designator as an offset in seconds from UTC.'''
        if not m:
            return 0
        tzd = m.group('tzd')
        if not tzd:
            return 0
        if tzd == 'Z':
            return 0
        hours = int(m.group('tzdhours'))
        minutes = m.group('tzdminutes')
        if minutes:
            minutes = int(minutes)
        else:
            minutes = 0
        offset = (hours*60 + minutes) * 60
        if tzd[0] == '+':
            return -offset
        return offset

    __date_re = ('(?P<year>\d\d\d\d)'
                 '(?:(?P<dsep>-|)'
                 '(?:(?P<julian>\d\d\d)'
                 '|(?P<month>\d\d)(?:(?P=dsep)(?P<day>\d\d))?))?')
    __tzd_re = '(?P<tzd>[-+](?P<tzdhours>\d\d)(?::?(?P<tzdminutes>\d\d))|Z)'
    __tzd_rx = re.compile(__tzd_re)
    __time_re = ('(?P<hours>\d\d)(?P<tsep>:|)(?P<minutes>\d\d)'
                 '(?:(?P=tsep)(?P<seconds>\d\d(?:[.,]\d+)?))?'
                 + __tzd_re)
    __datetime_re = '%s(?:T%s)?' % (__date_re, __time_re)
    __datetime_rx = re.compile(__datetime_re)
    m = __datetime_rx.match(dateString)
    if (m is None) or (m.group() != dateString):
        return
    gmt = __extract_date(m) + __extract_time(m) + (0, 0, 0)
    if gmt[0] == 0:
        return
    return time.gmtime(time.mktime(gmt) + __extract_tzd(m) - time.timezone)
registerDateHandler(_parse_date_w3dtf)

def _parse_date_rfc822(dateString):
    '''Parse an RFC822, RFC1123, RFC2822, or asctime-style date'''
    data = dateString.split()
    if data[0][-1] in (',', '.') or data[0].lower() in rfc822._daynames:
        del data[0]
    if len(data) == 4:
        s = data[3]
        i = s.find('+')
        if i > 0:
            data[3:] = [s[:i], s[i+1:]]
        else:
            data.append('')
        dateString = " ".join(data)
    if len(data) < 5:
        dateString += ' 00:00:00 GMT'
    tm = rfc822.parsedate_tz(dateString)
    if tm:
        return time.gmtime(rfc822.mktime_tz(tm))
# rfc822.py defines several time zones, but we define some extra ones.
# 'ET' is equivalent to 'EST', etc.
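# (Illustrative note, not in the original source: once the update below is
# applied, a date such as 'Thu, 01 Jan 2004 19:48:21 ET' parses exactly like
# 'Thu, 01 Jan 2004 19:48:21 EST', i.e. as UTC-5.  The values follow the
# hours-times-100 convention used by rfc822._timezones.)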
_additional_timezones = {'AT': -400, 'ET': -500, 'CT': -600, 'MT': -700, 'PT': -800} rfc822._timezones.update(_additional_timezones) registerDateHandler(_parse_date_rfc822) def _parse_date(dateString): '''Parses a variety of date formats into a 9-tuple in GMT''' for handler in _date_handlers: try: date9tuple = handler(dateString) if not date9tuple: continue if len(date9tuple) != 9: if _debug: sys.stderr.write('date handler function must return 9-tuple\n') raise ValueError map(int, date9tuple) return date9tuple except Exception, e: if _debug: sys.stderr.write('%s raised %s\n' % (handler.__name__, repr(e))) pass return None def _getCharacterEncoding(http_headers, xml_data): '''Get the character encoding of the XML document http_headers is a dictionary xml_data is a raw string (not Unicode) This is so much trickier than it sounds, it's not even funny. According to RFC 3023 ('XML Media Types'), if the HTTP Content-Type is application/xml, application/*+xml, application/xml-external-parsed-entity, or application/xml-dtd, the encoding given in the charset parameter of the HTTP Content-Type takes precedence over the encoding given in the XML prefix within the document, and defaults to 'utf-8' if neither are specified. But, if the HTTP Content-Type is text/xml, text/*+xml, or text/xml-external-parsed-entity, the encoding given in the XML prefix within the document is ALWAYS IGNORED and only the encoding given in the charset parameter of the HTTP Content-Type header should be respected, and it defaults to 'us-ascii' if not specified. Furthermore, discussion on the atom-syntax mailing list with the author of RFC 3023 leads me to the conclusion that any document served with a Content-Type of text/* and no charset parameter must be treated as us-ascii. (We now do this.) And also that it must always be flagged as non-well-formed. (We now do this too.) If Content-Type is unspecified (input was local file or non-HTTP source) or unrecognized (server just got it totally wrong), then go by the encoding given in the XML prefix of the document and default to 'iso-8859-1' as per the HTTP specification (RFC 2616). Then, assuming we didn't find a character encoding in the HTTP headers (and the HTTP Content-type allowed us to look in the body), we need to sniff the first few bytes of the XML data and try to determine whether the encoding is ASCII-compatible. Section F of the XML specification shows the way here: http://www.w3.org/TR/REC-xml/#sec-guessing-no-ext-info If the sniffed encoding is not ASCII-compatible, we need to make it ASCII compatible so that we can sniff further into the XML declaration to find the encoding attribute, which will tell us the true encoding. Of course, none of this guarantees that we will be able to parse the feed in the declared character encoding (assuming it was declared correctly, which many are not). CJKCodecs and iconv_codec help a lot; you should definitely install them if you can. 
http://cjkpython.i18n.org/ ''' def _parseHTTPContentType(content_type): '''takes HTTP Content-Type header and returns (content type, charset) If no charset is specified, returns (content type, '') If no content type is specified, returns ('', '') Both return parameters are guaranteed to be lowercase strings ''' content_type = content_type or '' content_type, params = cgi.parse_header(content_type) return content_type, params.get('charset', '').replace("'", '') sniffed_xml_encoding = '' xml_encoding = '' true_encoding = '' http_content_type, http_encoding = _parseHTTPContentType(http_headers.get('content-type')) # Must sniff for non-ASCII-compatible character encodings before # searching for XML declaration. This heuristic is defined in # section F of the XML specification: # http://www.w3.org/TR/REC-xml/#sec-guessing-no-ext-info try: if xml_data[:4] == '\x4c\x6f\xa7\x94': # EBCDIC xml_data = _ebcdic_to_ascii(xml_data) elif xml_data[:4] == '\x00\x3c\x00\x3f': # UTF-16BE sniffed_xml_encoding = 'utf-16be' xml_data = unicode(xml_data, 'utf-16be').encode('utf-8') elif (len(xml_data) >= 4) and (xml_data[:2] == '\xfe\xff') and (xml_data[2:4] != '\x00\x00'): # UTF-16BE with BOM sniffed_xml_encoding = 'utf-16be' xml_data = unicode(xml_data[2:], 'utf-16be').encode('utf-8') elif xml_data[:4] == '\x3c\x00\x3f\x00': # UTF-16LE sniffed_xml_encoding = 'utf-16le' xml_data = unicode(xml_data, 'utf-16le').encode('utf-8') elif (len(xml_data) >= 4) and (xml_data[:2] == '\xff\xfe') and (xml_data[2:4] != '\x00\x00'): # UTF-16LE with BOM sniffed_xml_encoding = 'utf-16le' xml_data = unicode(xml_data[2:], 'utf-16le').encode('utf-8') elif xml_data[:4] == '\x00\x00\x00\x3c': # UTF-32BE sniffed_xml_encoding = 'utf-32be' xml_data = unicode(xml_data, 'utf-32be').encode('utf-8') elif xml_data[:4] == '\x3c\x00\x00\x00': # UTF-32LE sniffed_xml_encoding = 'utf-32le' xml_data = unicode(xml_data, 'utf-32le').encode('utf-8') elif xml_data[:4] == '\x00\x00\xfe\xff': # UTF-32BE with BOM sniffed_xml_encoding = 'utf-32be' xml_data = unicode(xml_data[4:], 'utf-32be').encode('utf-8') elif xml_data[:4] == '\xff\xfe\x00\x00': # UTF-32LE with BOM sniffed_xml_encoding = 'utf-32le' xml_data = unicode(xml_data[4:], 'utf-32le').encode('utf-8') elif xml_data[:3] == '\xef\xbb\xbf': # UTF-8 with BOM sniffed_xml_encoding = 'utf-8' xml_data = unicode(xml_data[3:], 'utf-8').encode('utf-8') else: # ASCII-compatible pass xml_encoding_match = re.compile('^<\?.*encoding=[\'"](.*?)[\'"].*\?>').match(xml_data) except: xml_encoding_match = None if xml_encoding_match: xml_encoding = xml_encoding_match.groups()[0].lower() if sniffed_xml_encoding and (xml_encoding in ('iso-10646-ucs-2', 'ucs-2', 'csunicode', 'iso-10646-ucs-4', 'ucs-4', 'csucs4', 'utf-16', 'utf-32', 'utf_16', 'utf_32', 'utf16', 'u16')): xml_encoding = sniffed_xml_encoding acceptable_content_type = 0 application_content_types = ('application/xml', 'application/xml-dtd', 'application/xml-external-parsed-entity') text_content_types = ('text/xml', 'text/xml-external-parsed-entity') if (http_content_type in application_content_types) or \ (http_content_type.startswith('application/') and http_content_type.endswith('+xml')): acceptable_content_type = 1 true_encoding = http_encoding or xml_encoding or 'utf-8' elif (http_content_type in text_content_types) or \ (http_content_type.startswith('text/')) and http_content_type.endswith('+xml'): acceptable_content_type = 1 true_encoding = http_encoding or 'us-ascii' elif http_content_type.startswith('text/'): true_encoding = http_encoding or 'us-ascii' 
    elif http_headers and (not http_headers.has_key('content-type')):
        true_encoding = xml_encoding or 'iso-8859-1'
    else:
        true_encoding = xml_encoding or 'utf-8'
    return true_encoding, http_encoding, xml_encoding, sniffed_xml_encoding, acceptable_content_type

def _toUTF8(data, encoding):
    '''Changes an XML data stream on the fly to specify a new encoding

    data is a raw sequence of bytes (not Unicode) that is presumed to be in %encoding already
    encoding is a string recognized by encodings.aliases
    '''
    if _debug: sys.stderr.write('entering _toUTF8, trying encoding %s\n' % encoding)
    # strip Byte Order Mark (if present)
    if (len(data) >= 4) and (data[:2] == '\xfe\xff') and (data[2:4] != '\x00\x00'):
        if _debug:
            sys.stderr.write('stripping BOM\n')
            if encoding != 'utf-16be':
                sys.stderr.write('trying utf-16be instead\n')
        encoding = 'utf-16be'
        data = data[2:]
    elif (len(data) >= 4) and (data[:2] == '\xff\xfe') and (data[2:4] != '\x00\x00'):
        if _debug:
            sys.stderr.write('stripping BOM\n')
            if encoding != 'utf-16le':
                sys.stderr.write('trying utf-16le instead\n')
        encoding = 'utf-16le'
        data = data[2:]
    elif data[:3] == '\xef\xbb\xbf':
        if _debug:
            sys.stderr.write('stripping BOM\n')
            if encoding != 'utf-8':
                sys.stderr.write('trying utf-8 instead\n')
        encoding = 'utf-8'
        data = data[3:]
    elif data[:4] == '\x00\x00\xfe\xff':
        if _debug:
            sys.stderr.write('stripping BOM\n')
            if encoding != 'utf-32be':
                sys.stderr.write('trying utf-32be instead\n')
        encoding = 'utf-32be'
        data = data[4:]
    elif data[:4] == '\xff\xfe\x00\x00':
        if _debug:
            sys.stderr.write('stripping BOM\n')
            if encoding != 'utf-32le':
                sys.stderr.write('trying utf-32le instead\n')
        encoding = 'utf-32le'
        data = data[4:]
    newdata = unicode(data, encoding)
    if _debug: sys.stderr.write('successfully converted %s data to unicode\n' % encoding)
    declmatch = re.compile('^<\?xml[^>]*?>')
    newdecl = '''<?xml version='1.0' encoding='utf-8'?>'''
    if declmatch.search(newdata):
        newdata = declmatch.sub(newdecl, newdata)
    else:
        newdata = newdecl + u'\n' + newdata
    return newdata.encode('utf-8')

def _stripDoctype(data):
    '''Strips DOCTYPE from XML document, returns (rss_version, stripped_data)

    rss_version may be 'rss091n' or None
    stripped_data is the same XML document, minus the DOCTYPE
    '''
    entity_pattern = re.compile(r'<!ENTITY([^>]*?)>', re.MULTILINE)
    data = entity_pattern.sub('', data)
    doctype_pattern = re.compile(r'<!DOCTYPE([^>]*?)>', re.MULTILINE)
    doctype_results = doctype_pattern.findall(data)
    doctype = doctype_results and doctype_results[0] or ''
    if doctype.lower().count('netscape'):
        version = 'rss091n'
    else:
        version = None
    data = doctype_pattern.sub('', data)
    return version, data

def parse(url_file_stream_or_string, etag=None, modified=None, agent=None, referrer=None, handlers=[]):
    '''Parse a feed from a URL, file, stream, or string'''
    result = FeedParserDict()
    result['feed'] = FeedParserDict()
    result['entries'] = []
    if _XML_AVAILABLE:
        result['bozo'] = 0
    if type(handlers) == types.InstanceType:
        handlers = [handlers]
    try:
        f = _open_resource(url_file_stream_or_string, etag, modified, agent, referrer, handlers)
        data = f.read()
    except Exception, e:
        result['bozo'] = 1
        result['bozo_exception'] = e
        data = ''
        f = None

    # if feed is gzip-compressed, decompress it
    if f and data and hasattr(f, 'headers'):
        if gzip and f.headers.get('content-encoding', '') == 'gzip':
            try:
                data = gzip.GzipFile(fileobj=_StringIO(data)).read()
            except Exception, e:
                # Some feeds claim to be gzipped but they're not, so
                # we get garbage.  Ideally, we should re-request the
                # feed without the 'Accept-encoding: gzip' header,
                # but we don't.
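                # (Hypothetical sketch, not in the shipped code, of what such
                # a re-request might look like:
                #     request.headers.pop('Accept-encoding', None)
                #     data = opener.open(request).read()
                # `request` and `opener` would first have to be threaded out
                # of _open_resource; as written, the code just flags the feed
                # as bozo below.)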
result['bozo'] = 1 result['bozo_exception'] = e data = '' elif zlib and f.headers.get('content-encoding', '') == 'deflate': try: data = zlib.decompress(data, -zlib.MAX_WBITS) except Exception, e: result['bozo'] = 1 result['bozo_exception'] = e data = '' # save HTTP headers if hasattr(f, 'info'): info = f.info() result['etag'] = info.getheader('ETag') last_modified = info.getheader('Last-Modified') if last_modified: result['modified'] = _parse_date(last_modified) if hasattr(f, 'url'): result['href'] = f.url result['status'] = 200 if hasattr(f, 'status'): result['status'] = f.status if hasattr(f, 'headers'): result['headers'] = f.headers.dict if hasattr(f, 'close'): f.close() # there are four encodings to keep track of: # - http_encoding is the encoding declared in the Content-Type HTTP header # - xml_encoding is the encoding declared in the ; changed # project name #2.5 - 7/25/2003 - MAP - changed to Python license (all contributors agree); # removed unnecessary urllib code -- urllib2 should always be available anyway; # return actual url, status, and full HTTP headers (as result['url'], # result['status'], and result['headers']) if parsing a remote feed over HTTP -- # this should pass all the HTTP tests at ; # added the latest namespace-of-the-week for RSS 2.0 #2.5.1 - 7/26/2003 - RMK - clear opener.addheaders so we only send our custom # User-Agent (otherwise urllib2 sends two, which confuses some servers) #2.5.2 - 7/28/2003 - MAP - entity-decode inline xml properly; added support for # inline and as used in some RSS 2.0 feeds #2.5.3 - 8/6/2003 - TvdV - patch to track whether we're inside an image or # textInput, and also to return the character encoding (if specified) #2.6 - 1/1/2004 - MAP - dc:author support (MarekK); fixed bug tracking # nested divs within content (JohnD); fixed missing sys import (JohanS); # fixed regular expression to capture XML character encoding (Andrei); # added support for Atom 0.3-style links; fixed bug with textInput tracking; # added support for cloud (MartijnP); added support for multiple # category/dc:subject (MartijnP); normalize content model: 'description' gets # description (which can come from description, summary, or full content if no # description), 'content' gets dict of base/language/type/value (which can come # from content:encoded, xhtml:body, content, or fullitem); # fixed bug matching arbitrary Userland namespaces; added xml:base and xml:lang # tracking; fixed bug tracking unknown tags; fixed bug tracking content when # element is not in default namespace (like Pocketsoap feed); # resolve relative URLs in link, guid, docs, url, comments, wfw:comment, # wfw:commentRSS; resolve relative URLs within embedded HTML markup in # description, xhtml:body, content, content:encoded, title, subtitle, # summary, info, tagline, and copyright; added support for pingback and # trackback namespaces #2.7 - 1/5/2004 - MAP - really added support for trackback and pingback # namespaces, as opposed to 2.6 when I said I did but didn't really; # sanitize HTML markup within some elements; added mxTidy support (if # installed) to tidy HTML markup within some elements; fixed indentation # bug in _parse_date (FazalM); use socket.setdefaulttimeout if available # (FazalM); universal date parsing and normalization (FazalM): 'created', modified', # 'issued' are parsed into 9-tuple date format and stored in 'created_parsed', # 'modified_parsed', and 'issued_parsed'; 'date' is duplicated in 'modified' # and vice-versa; 'date_parsed' is duplicated in 'modified_parsed' and 
vice-versa
#2.7.1 - 1/9/2004 - MAP - fixed bug handling &quot; and &apos;.  fixed memory
#  leak not closing url opener (JohnD); added dc:publisher support (MarekK);
#  added admin:errorReportsTo support (MarekK); Python 2.1 dict support (MarekK)
#2.7.4 - 1/14/2004 - MAP - added workaround for improperly formed <br/>
    tags in # encoded HTML (skadz); fixed unicode handling in normalize_attrs (ChrisL); # fixed relative URI processing for guid (skadz); added ICBM support; added # base64 support #2.7.5 - 1/15/2004 - MAP - added workaround for malformed DOCTYPE (seen on many # blogspot.com sites); added _debug variable #2.7.6 - 1/16/2004 - MAP - fixed bug with StringIO importing #3.0b3 - 1/23/2004 - MAP - parse entire feed with real XML parser (if available); # added several new supported namespaces; fixed bug tracking naked markup in # description; added support for enclosure; added support for source; re-added # support for cloud which got dropped somehow; added support for expirationDate #3.0b4 - 1/26/2004 - MAP - fixed xml:lang inheritance; fixed multiple bugs tracking # xml:base URI, one for documents that don't define one explicitly and one for # documents that define an outer and an inner xml:base that goes out of scope # before the end of the document #3.0b5 - 1/26/2004 - MAP - fixed bug parsing multiple links at feed level #3.0b6 - 1/27/2004 - MAP - added feed type and version detection, result['version'] # will be one of SUPPORTED_VERSIONS.keys() or empty string if unrecognized; # added support for creativeCommons:license and cc:license; added support for # full Atom content model in title, tagline, info, copyright, summary; fixed bug # with gzip encoding (not always telling server we support it when we do) #3.0b7 - 1/28/2004 - MAP - support Atom-style author element in author_detail # (dictionary of 'name', 'url', 'email'); map author to author_detail if author # contains name + email address #3.0b8 - 1/28/2004 - MAP - added support for contributor #3.0b9 - 1/29/2004 - MAP - fixed check for presence of dict function; added # support for summary #3.0b10 - 1/31/2004 - MAP - incorporated ISO-8601 date parsing routines from # xml.util.iso8601 #3.0b11 - 2/2/2004 - MAP - added 'rights' to list of elements that can contain # dangerous markup; fiddled with decodeEntities (not right); liberalized # date parsing even further #3.0b12 - 2/6/2004 - MAP - fiddled with decodeEntities (still not right); # added support to Atom 0.2 subtitle; added support for Atom content model # in copyright; better sanitizing of dangerous HTML elements with end tags # (script, frameset) #3.0b13 - 2/8/2004 - MAP - better handling of empty HTML tags (br, hr, img, # etc.) in embedded markup, in either HTML or XHTML form (
<br>, <br/>, <br />
    ) #3.0b14 - 2/8/2004 - MAP - fixed CDATA handling in non-wellformed feeds under # Python 2.1 #3.0b15 - 2/11/2004 - MAP - fixed bug resolving relative links in wfw:commentRSS; # fixed bug capturing author and contributor URL; fixed bug resolving relative # links in author and contributor URL; fixed bug resolvin relative links in # generator URL; added support for recognizing RSS 1.0; passed Simon Fell's # namespace tests, and included them permanently in the test suite with his # permission; fixed namespace handling under Python 2.1 #3.0b16 - 2/12/2004 - MAP - fixed support for RSS 0.90 (broken in b15) #3.0b17 - 2/13/2004 - MAP - determine character encoding as per RFC 3023 #3.0b18 - 2/17/2004 - MAP - always map description to summary_detail (Andrei); # use libxml2 (if available) #3.0b19 - 3/15/2004 - MAP - fixed bug exploding author information when author # name was in parentheses; removed ultra-problematic mxTidy support; patch to # workaround crash in PyXML/expat when encountering invalid entities # (MarkMoraes); support for textinput/textInput #3.0b20 - 4/7/2004 - MAP - added CDF support #3.0b21 - 4/14/2004 - MAP - added Hot RSS support #3.0b22 - 4/19/2004 - MAP - changed 'channel' to 'feed', 'item' to 'entries' in # results dict; changed results dict to allow getting values with results.key # as well as results[key]; work around embedded illformed HTML with half # a DOCTYPE; work around malformed Content-Type header; if character encoding # is wrong, try several common ones before falling back to regexes (if this # works, bozo_exception is set to CharacterEncodingOverride); fixed character # encoding issues in BaseHTMLProcessor by tracking encoding and converting # from Unicode to raw strings before feeding data to sgmllib.SGMLParser; # convert each value in results to Unicode (if possible), even if using # regex-based parsing #3.0b23 - 4/21/2004 - MAP - fixed UnicodeDecodeError for feeds that contain # high-bit characters in attributes in embedded HTML in description (thanks # Thijs van de Vossen); moved guid, date, and date_parsed to mapped keys in # FeedParserDict; tweaked FeedParserDict.has_key to return True if asking # about a mapped key #3.0fc1 - 4/23/2004 - MAP - made results.entries[0].links[0] and # results.entries[0].enclosures[0] into FeedParserDict; fixed typo that could # cause the same encoding to be tried twice (even if it failed the first time); # fixed DOCTYPE stripping when DOCTYPE contained entity declarations; # better textinput and image tracking in illformed RSS 1.0 feeds #3.0fc2 - 5/10/2004 - MAP - added and passed Sam's amp tests; added and passed # my blink tag tests #3.0fc3 - 6/18/2004 - MAP - fixed bug in _changeEncodingDeclaration that # failed to parse utf-16 encoded feeds; made source into a FeedParserDict; # duplicate admin:generatorAgent/@rdf:resource in generator_detail.url; # added support for image; refactored parse() fallback logic to try other # encodings if SAX parsing fails (previously it would only try other encodings # if re-encoding failed); remove unichr madness in normalize_attrs now that # we're properly tracking encoding in and out of BaseHTMLProcessor; set # feed.language from root-level xml:lang; set entry.id from rdf:about; # send Accept header #3.0 - 6/21/2004 - MAP - don't try iso-8859-1 (can't distinguish between # iso-8859-1 and windows-1252 anyway, and most incorrectly marked feeds are # windows-1252); fixed regression that could cause the same encoding to be # tried twice (even if it failed the first time) #3.0.1 - 6/22/2004 
- MAP - default to us-ascii for all text/* content types; # recover from malformed content-type header parameter with no equals sign # ('text/xml; charset:iso-8859-1') #3.1 - 6/28/2004 - MAP - added and passed tests for converting HTML entities # to Unicode equivalents in illformed feeds (aaronsw); added and # passed tests for converting character entities to Unicode equivalents # in illformed feeds (aaronsw); test for valid parsers when setting # XML_AVAILABLE; make version and encoding available when server returns # a 304; add handlers parameter to pass arbitrary urllib2 handlers (like # digest auth or proxy support); add code to parse username/password # out of url and send as basic authentication; expose downloading-related # exceptions in bozo_exception (aaronsw); added __contains__ method to # FeedParserDict (aaronsw); added publisher_detail (aaronsw) #3.2 - 7/3/2004 - MAP - use cjkcodecs and iconv_codec if available; always # convert feed to UTF-8 before passing to XML parser; completely revamped # logic for determining character encoding and attempting XML parsing # (much faster); increased default timeout to 20 seconds; test for presence # of Location header on redirects; added tests for many alternate character # encodings; support various EBCDIC encodings; support UTF-16BE and # UTF16-LE with or without a BOM; support UTF-8 with a BOM; support # UTF-32BE and UTF-32LE with or without a BOM; fixed crashing bug if no # XML parsers are available; added support for 'Content-encoding: deflate'; # send blank 'Accept-encoding: ' header if neither gzip nor zlib modules # are available #3.3 - 7/15/2004 - MAP - optimize EBCDIC to ASCII conversion; fix obscure # problem tracking xml:base and xml:lang if element declares it, child # doesn't, first grandchild redeclares it, and second grandchild doesn't; # refactored date parsing; defined public registerDateHandler so callers # can add support for additional date formats at runtime; added support # for OnBlog, Nate, MSSQL, Greek, and Hungarian dates (ytrewq1); added # zopeCompatibilityHack() which turns FeedParserDict into a regular # dictionary, required for Zope compatibility, and also makes command- # line debugging easier because pprint module formats real dictionaries # better than dictionary-like objects; added NonXMLContentType exception, # which is stored in bozo_exception when a feed is served with a non-XML # media type such as 'text/plain'; respect Content-Language as default # language if not xml:lang is present; cloud dict is now FeedParserDict; # generator dict is now FeedParserDict; better tracking of xml:lang, # including support for xml:lang='' to unset the current language; # recognize RSS 1.0 feeds even when RSS 1.0 namespace is not the default # namespace; don't overwrite final status on redirects (scenarios: # redirecting to a URL that returns 304, redirecting to a URL that # redirects to another URL with a different type of redirect); add # support for HTTP 303 redirects #4.0 - MAP - support for relative URIs in xml:base attribute; fixed # encoding issue with mxTidy (phopkins); preliminary support for RFC 3229; # support for Atom 1.0; support for iTunes extensions; new 'tags' for # categories/keywords/etc. 
as array of dict # {'term': term, 'scheme': scheme, 'label': label} to match Atom 1.0 # terminology; parse RFC 822-style dates with no time; lots of other # bug fixes #4.1 - MAP - removed socket timeout; added support for chardet library PenguinTV-4.2.0/penguintv/utils.py0000644000000000000000000005127111450512725014023 0ustar # -*- coding: utf-8 -*- # Written by Owen Williams # see LICENSE for license information import os,sys,time, pwd, os.path import subprocess import string import urllib import string import locale import gettext import shutil import logging VERSION="4.2.0" try: from sqlite3 import OperationalError as OperationalError except: from pysqlite2.dbapi2 import OperationalError as OperationalError try: import hildon logging.debug("Detected hildon environment") RUNNING_HILDON = True except: RUNNING_HILDON = False try: if RUNNING_HILDON: DO_GTK_CHECKS = False else: import gtk DO_GTK_CHECKS = True except: DO_GTK_CHECKS = False #locale.setlocale(locale.LC_ALL, '') gettext.install('penguintv', '/usr/share/locale') gettext.bindtextdomain('penguintv', '/usr/share/locale') gettext.textdomain('penguintv') _=gettext.gettext RUNNING_SUGAR = os.environ.has_key('SUGAR_PENGUINTV') if RUNNING_SUGAR: #I do this in case we're running in a python environment that has lucene #and/or gconf but we want to pretend they aren't there HAS_LUCENE = False HAS_SEARCH = False HAS_XAPIAN = False HAS_GCONF = False HAS_GNOMEVFS = False HAS_PYXML = False HAS_STATUS_ICON = False HAS_MOZILLA = True #HAS_GSTREAMER = True #I'm having trouble building gst for jhbuild, so detect this try: import pygst pygst.require("0.10") import gst HAS_GSTREAMER = True except: HAS_GSTREAMER = False else: if DO_GTK_CHECKS: try: import gtk import gtkmozembed HAS_MOZILLA = True except: try: from ptvmozembed import gtkmozembed logging.info("Using PenguinTV-built gtkmozembed") HAS_MOZILLA = True except: HAS_MOZILLA = False HAS_SEARCH = False try: import xapian HAS_XAPIAN = True HAS_SEARCH = True except: HAS_XAPIAN = False try: import PyLucene HAS_LUCENE = True HAS_SEARCH = True except: HAS_LUCENE = False try: import gconf HAS_GCONF = True except: try: from gnome import gconf HAS_GCONF = True except: HAS_GCONF = False if DO_GTK_CHECKS: try: import gnomevfs HAS_GNOMEVFS = True except: try: from gnome import gnomevfs HAS_GNOMEVFS = True except: HAS_GNOMEVFS = False try: from xml.sax.handler import ContentHandler HAS_PYXML = True except: try: from xml.sax.saxutils import DefaultHandler HAS_PYXML = True except: HAS_PYXML = False if DO_GTK_CHECKS and not RUNNING_HILDON: if gtk.pygtk_version >= (2, 10, 0): HAS_STATUS_ICON = True else: HAS_STATUS_ICON = False try: import pynotify HAS_PYNOTIFY = True except: HAS_PYNOTIFY = False try: import pygst pygst.require("0.10") import gst HAS_GSTREAMER = True except: HAS_GSTREAMER = False try: import dbus HAS_DBUS = True except: HAS_DBUS = False if RUNNING_HILDON: #having a status icon causes tons of problems (causes hildonn UI to crash) HAS_STATUS_ICON = False HAS_MOZILLA = False HAS_GNOMEVFS = False #DEBUG #_USE_KDE_OVERRIDE=False # Lucene sucks, forget it HAS_LUCENE = False ##disable while still unstable #HAS_XAPIAN = False if not HAS_XAPIAN: HAS_SEARCH = False if HAS_XAPIAN: logging.info("Using Xapian search engine") elif HAS_LUCENE: logging.info("Using Lucene search engine") else: logging.info("xapian or lucene not found, search disabled") # Pynotify is still broken, forget it #HAS_PYNOTIFY = False #HAS_PYXML = False #HAS_STATUS_ICON = False #HAS_GNOMEVFS = False #HAS_MOZILLA=False #too buggy, 
disable for release ENABLE_ARTICLESYNC=True def get_home(): if RUNNING_SUGAR: import sugar.env return os.path.join(sugar.env.get_profile_path(), 'penguintv') else: return os.path.join(os.getenv('HOME'), ".penguintv") def format_size(size): if size > 1073741824: return "%.2f GB" % (float(size)/1073741824) elif size > 1048576: return str(int(round(size/1048576)))+ " MB" elif size > 1024: return str(int(round(size/1024)))+" KB" else: return str(size)+" bytes" def GetPrefix(): if os.environ.has_key('PTV_PREFIX'): h, t = os.path.split(os.environ['PTV_PREFIX']) return h h, t = os.path.split(os.path.split(os.path.abspath(sys.argv[0]))[0]) return h _glade_prefix = None _share_prefix = None def get_share_prefix(): global _share_prefix if _share_prefix is not None: return _share_prefix get_glade_prefix() return _share_prefix def get_glade_prefix(): global _glade_prefix global _share_prefix if _glade_prefix is not None: return _glade_prefix logging.debug("finding glade prefix") import utils for p in (os.path.join(GetPrefix(),"share","penguintv","glade"), os.path.join(GetPrefix(),"share","glade"), os.path.join(os.path.split(os.path.abspath(sys.argv[0]))[0],"share"), os.path.join(GetPrefix(),"share","sugar","activities","ptv","share"), os.path.join(os.path.split(os.path.split(utils.__file__)[0])[0],'share','glade'), "/usr/share/penguintv/glade"): try: os.stat(os.path.join(p,"dialogs.glade")) logging.debug("glade prefix found: %s" % (p,)) _glade_prefix = p _share_prefix = '/'.join(p.split('/')[0:-1]) return _glade_prefix except: continue return None def get_image_path(filename): for p in (get_share_prefix(), os.path.join(GetPrefix(), 'share', 'pixmaps'), os.path.join(GetPrefix(), 'share'), #in case the install is still in the source dirs os.path.join(get_share_prefix(), 'pixmaps')): try: icon_file = os.path.join(p, filename) os.stat(icon_file) return icon_file except Exception, e: continue logging.error("icon not found:" + filename) raise e return icon_file def hours(n): #this func copyright Bram Cohen if n == -1: return '' if n == 0: return _('complete!') n = long(n) h, r = divmod(n, 60 * 60) m, sec = divmod(r, 60) if h > 1000000: return '' if h > 0: return '%d:%02d:%02d' % (h, m, sec) else: return '%d:%02d' % (m, sec) def is_known_media(filename): if os.path.isdir(filename): for root,dirs,files in os.walk(filename): for f in files: try: return desktop_has_file_handler(f) except: pass return False try: return desktop_has_file_handler(filename) except: return False def get_play_command_for(filename): known_players={ 'totem':'--enqueue', 'xine':'--enqueue', 'mplayer': '-enqueue', 'banshee': '--enqueue'} if is_kde(): try: mime_magic = kio.KMimeMagic() mimetype = str(mime_magic.findFileType(filename).mimeType()) #mimetype = str(kio.KMimeType.findByPath(filename).defaultMimeType()) service = kio.KServiceTypeProfile.preferredService(mimetype,"Application") if service is None: #no service, so we use kfmclient and kde should launch a helper window logging.info("unknown type, using kfmclient") return "kfmclient exec " full_qual_prog = str(service.exec_()).replace("%U","").strip() except: logging.info("error getting type, using kfmclient") return "kfmclient exec " else: #GNOME -- notice how short and sweet this is in comparison :P if HAS_GNOMEVFS: try: mimetype = gnomevfs.get_mime_type(urllib.quote(filename)) #fix for penny arcade filenames full_qual_prog = gnomevfs.mime_get_default_application(mimetype)[2] except: logging.info("unknown type, using gnome-open") return "gnome-open " else: # :( return "echo " try: 
path, program = os.path.split(full_qual_prog) except: program = full_qual_prog if known_players.has_key(program): return full_qual_prog+" "+known_players[program] return full_qual_prog def get_dated_dir(t=None): if t is None: t = time.localtime() today = time.strftime("%Y-%m-%d", t) return today #http://www.faqts.com/knowledge_base/view.phtml/aid/2682 class GlobDirectoryWalker: # a forward iterator that traverses a directory tree def __init__(self, directory, pattern="*"): self.stack = [directory] self.pattern = pattern self.files = [] self.index = 0 def __getitem__(self, index): import fnmatch while 1: try: file = self.files[self.index] self.index = self.index + 1 except IndexError: # pop next directory from stack try: while True: self.directory = self.stack.pop() self.index = 0 self.files = os.listdir(self.directory) #loops if we have a problem listing the directory break #but if it works we break except OSError, e: continue #evil... but it works else: # got a filename fullname = os.path.join(self.directory, file) if os.path.isdir(fullname) and not os.path.islink(fullname): self.stack.append(fullname) if fnmatch.fnmatch(file, self.pattern): return fullname #usage: #for file in GlobDirectoryWalker(".", "*.py"): # print file def deltree(path): #adapted and corrected from: http://aspn.activestate.com/ASPN/docs/ActivePython/2.2/PyWin32/Recursive_directory_deletes_and_special_files.html for file in os.listdir(path): file_or_dir = os.path.join(path,file) if os.path.isdir(file_or_dir) and not os.path.islink(file_or_dir): deltree(file_or_dir) #it's a directory reucursive call to function again else: os.remove(file_or_dir) #it's a file, delete it os.rmdir(path) #delete the directory here #http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/82465 def _mkdir(newdir): """works the way a good mkdir should :) - already exists, silently complete - regular file in the way, raise an exception - parent directory(ies) does not exist, make them as well """ if os.path.isdir(newdir): pass elif os.path.isfile(newdir): raise OSError("a file with the same name as the desired " \ "dir, '%s', already exists." 
% newdir) else: head, tail = os.path.split(newdir) if head and not os.path.isdir(head): _mkdir(head) #print "_mkdir %s" % repr(newdir) if tail: os.mkdir(newdir) def my_quote(s): """Replaces some problematic characters with html equivalent if necessary""" #right now just & to &, but not & to &amp; #instead of doing this with logic, just "unquote" the amps and then requote them s=string.replace(s,"&","&") s=string.replace(s,"&","&") return s def make_pathsafe(s): unsafe=('<','>',':','"','/','\\','|') for c in unsafe: s=string.replace(s,c,'_') return s def get_hyphen(): if RUNNING_HILDON: return "--" else: return "–" def uniquer(seq, idfun=None): if not seq: return [] if idfun is None: def idfun(x): return x seen = {} result = [] for item in seq: marker = idfun(item) # in old Python versions: # if seen.has_key(marker) # but in new ones: if marker in seen: continue seen[marker] = 1 result.append(item) return result def is_kde(): """Returns true if the user is running a full KDE desktop, or if it has been overridden for debug purposes""" try: if _USE_KDE_OVERRIDE == True: return True except: return False return os.environ.has_key('KDE_FULL_SESSION') def desktop_has_file_handler(filename): """Returns true if the desktop has a file handler for this filetype.""" if is_kde(): # If KDE can't handle the file, we'll use kfmclient exec to run the file, # and KDE will show a dialog asking for the program # to use anyway. return True else: if HAS_GNOMEVFS: # Otherwise, use GNOMEVFS to find the appropriate handler handler = gnomevfs.mime_get_default_application(gnomevfs.get_mime_type(urllib.quote(str(filename)))) #PA fix #Nerdist fix, urllib prefers strings over unicode if handler is not None: return True return False else: #FIXME: olpc doesn't know what the fuck... 
pretend yes and let error get caught later return True def is_file_media(filename): """Returns true if this is a media file (audio or video) and false if it is any other type of file""" if is_kde(): mime_magic = kio.KMimeMagic() mimetype = str(mime_magic.findFileType(filename).mimeType()) elif HAS_GNOMEVFS: mimetype = gnomevfs.get_mime_type(urllib.quote(filename)) else: return False valid_mimes=['video','audio','mp4','realmedia','m4v','mov'] for mime in valid_mimes: if mime in mimetype: return True return False #http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/440481 #class MLStripper(HTMLParser.HTMLParser): # def __init__(self): # self.reset() # self.fed = [] # def handle_data(self, d): # self.fed.append(d) # def get_fed_data(self): # return ''.join(self.fed) #usage: #def strip(s): # """ Strip illegal HTML tags from string s """ # parser = StrippingParser() # parser.feed(s) # parser.close() # parser.cleanup() # return parser.result ##http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/457667 ##I know this is very bad, but damn if it doesn't work #import __main__ # #class SuperGlobal: # # def __getattr__(self, name): # return __main__.__dict__.get(name, None) # # def __setattr__(self, name, value): # __main__.__dict__[name] = value # # def __delattr__(self, name): # if __main__.__dict__.has_key(name): # del __main__.__dict__[name] #thanks http://www.peterbe.com/plog/html-entity-fixer #from htmlentitydefs import entitydefs #import re #_unicode = re.compile('^&#(.*);$') #_entities_pared = {} #for entity in entitydefs: # if len(_unicode.findall(entitydefs[entity]))==0: # try: # _entities_pared[entity]=unicode(entitydefs[entity]) #this weeds out some more naughty characters # except: # pass _my_entities = {'amp': u'&', 'lt': u'<', 'gt': u'>', 'quot': u'"'} def html_entity_unfixer(text): """replace html-encoded html with regular html. I don't use htmlentitydefs because it causes utf problems""" for entity in _my_entities.keys(): text = text.replace("&"+entity+";", _my_entities[entity]) return text #def lucene_escape(text): # #+ - & | ! ( ) { } [ ] ^ " ~ * ? : \\ # escape_chars="""+-&|!(){}[]^"~*?:\\""" # text = text.replace( def get_disk_free(f="/"): """returns free disk space in bytes for the disk that contains file f, defaulting to root. Returns 0 on error""" stats = os.statvfs(f) #if this fails it will raise exactly the right error return stats.f_bsize * stats.f_bavail def get_disk_total(f="/"): """returns total disk space in bytes for the disk that contains file f, defaulting to root. Returns 0 on error""" stats = os.statvfs(f) #if this fails it will raise exactly the right error return stats.f_bsize * stats.f_blocks def init_gtkmozembed(): """We need to set up mozilla with set_comp_path in order for it not to crash. The fun part is not hardcoding that path since we have no way of getting it from the module itself. good luck with this""" logging.info("initializing mozilla") assert HAS_MOZILLA #new, easy behavior if os.path.exists('/usr/lib/xulrunner-1.9'): gtkmozembed.set_comp_path('/usr/lib/xulrunner-1.9') return #old, disgusting behavior if not os.environ.has_key('MOZILLA_FIVE_HOME'): return False moz_path = os.environ['MOZILLA_FIVE_HOME'] cmd = "ldd " + moz_path + '/libxpcom.so | grep "not found"' p = subprocess.Popen(cmd, shell=True, close_fds=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) retval = p.wait() stderr = p.stderr.read() stdout = p.stdout.read() if len(stderr) > 1 or len(stdout) > 0: print """***ERROR initializing mozilla. PenguinTV may crash shortly. 
You may need to export LD_LIBRARY_PATH=$MOZILLA_FIVE_HOME """ return False _init_mozilla_proxy() logging.info("initializing mozilla in " + str(moz_path)) gtkmozembed.set_comp_path(moz_path) return True def _init_mozilla_proxy(): if RUNNING_SUGAR: # don't even try return home = os.path.join(os.getenv('HOME'), ".penguintv") _mkdir(os.path.join(home, "gecko")) sys_proxy = {} sys_proxy['host'] = "" sys_proxy['port'] = 0 sys_proxy['type'] = 0 sys_proxy['autoconfig_url'] = "" # get system proxy prefs if any #if True: # sys_proxy['host'] = "6.2.7.2" # sys_proxy['port'] = 8080 # sys_proxy['type'] = 1 # sys_proxy['autoconfig_url'] = "testing" if HAS_GCONF: # get gnome http proxy preferences conf = gconf.client_get_default() use_proxy = conf.get_bool("/system/http_proxy/use_http_proxy") if use_proxy: sys_proxy['host'] = conf.get_string("/system/http_proxy/host") sys_proxy['port'] = conf.get_int("/system/http_proxy/port") sys_proxy['type'] = 1 else: # get most-recently modified moz prefs. prefs_files = [] for f in GlobDirectoryWalker(os.path.join(os.getenv('HOME'), ".mozilla"), "prefs.js"): prefs_files.append((os.stat(f).st_mtime, f)) if len(prefs_files) != 0: prefs_files.sort() prefs_files.reverse() source_prefs = prefs_files[0][1] sys_proxy = _get_proxy_prefs(source_prefs) # check against current settings try: os.stat(os.path.join(home, "gecko", "prefs.js")) cur_proxy = _get_proxy_prefs(os.path.join(home, "gecko", "prefs.js")) if sys_proxy == cur_proxy: logging.info("gecko proxy settings up to date") return except: pass try: logging.info("updating gecko proxy settings") f = open(os.path.join(home, "gecko", "prefs.js"), "w") f.write("""# Mozilla User Preferences /* Do not edit this file. * * If you make changes to this file while the application is running, * the changes will be overwritten when the application exits. 
* * To make a manual change to preferences, you can visit the URL about:config * For more information, see http://www.mozilla.org/unix/customizing.html#prefs */ user_pref("network.proxy.type", %d); user_pref("network.proxy.http", "%s"); user_pref("network.proxy.http_port", %d); user_pref("network.proxy.autoconfig_url", "%s"); """ % (sys_proxy['type'], sys_proxy['host'], sys_proxy['port'], sys_proxy['autoconfig_url'])) f.close() except: logging.warning("couldn't create prefs.js, proxy server connections may not work") def _get_proxy_prefs(filename): def isNumber(x): try: float(x) return True except: return False proxy = {} proxy['host'] = "" proxy['port'] = 0 proxy['type'] = 0 proxy['autoconfig_url'] = "" try: f = open(filename, "r") except: logging.warning("couldn't open gecko preferences file " + filename) return proxy for line in f.readlines(): if 'network' in line: if '"network.proxy.http"' in line: proxy['host'] = line.split('"')[3] elif '"network.proxy.autoconfig_url"' in line: proxy['autoconfig_url'] = line.split('"')[3] elif '"network.proxy.http_port"' in line: proxy['port'] = int("".join([c for c in line.split('"')[2] if isNumber(c)])) elif '"network.proxy.type"' in line: proxy['type'] = int("".join([c for c in line.split('"')[2] if isNumber(c)])) f.close() return proxy def get_pynotify_ok(): if not HAS_PYNOTIFY: #print "pynotify not found, using fallback notifications" return False # first get what package config reports cmd = "pkg-config notify-python --modversion" p = subprocess.Popen(cmd, shell=True, close_fds=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) retval = p.wait() stderr = p.stderr.read() if len(stderr) > 1 or retval != 0: logging.warning("trouble getting notify-python version from pkg-config, using fallback notifications") return False major,minor,rev = p.stdout.read().split('.') major = int(major) minor = int(minor) rev = int(rev) # if it's bad, return false if minor < 1: logging.info("pynotify too old, using fallback notifications") return False if minor == 1 and rev == 0: logging.info("pynotify too old, using fallback notifications") return False # # if it's good, check to see it's not lying about prefix # cmd = "pkg-config notify-python --variable=prefix" # p = subprocess.Popen(cmd, shell=True, close_fds=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) # retval = p.wait() # stderr = p.stderr.read() # if len(stderr) > 1 or retval != 0: # logging.info("trouble getting notify-python prefix from pkg-config, using fallback notifications") # return False # pkgconfig_prefix = p.stdout.read().strip() # # try: # dirname = os.path.split(pynotify.__file__)[0] # f = open(os.path.join(dirname, "_pynotify.la")) # except: # logging.info("trouble opening _pynotify.la, using fallback notifications") # return False # # libdir_line = "" # # for line in f.readlines(): # if line[0:6] == "libdir": # libdir_line = line.split("=")[1][1:-1] # break # f.close() # # libdir_line = libdir_line.strip() # # if len(libdir_line) == 0: # logging.info("trouble reading _pynotify.la, using fallback notifications") # return False # # if pkgconfig_prefix not in libdir_line: # logging.info("pkgconfig does not agree with _pynotify.la, using fallback notifications") # return False # # logging.info("Using pynotify notifications") return True def db_except(default_retval=None): def annotate(func): def _exec_cb(self, *args, **kwargs): try: return func(self, *args, **kwargs) except OperationalError: logging.debug("DB Exception") self._handle_db_exception() return _exec_cb(self, *args, **kwargs) 
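        # (illustrative usage sketch, not in the original source: a ptvDB-style
        # method wrapped with the decorator is retried once the connection has
        # been re-established by _handle_db_exception().  The class and method
        # names below are hypothetical.)
        #
        #   class SomeDB:
        #       @db_except()
        #       def get_entry(self, entry_id):
        #           return self._db.execute(
        #               "SELECT * FROM entries WHERE id=?", (entry_id,)).fetchone()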
return _exec_cb return annotate if is_kde(): import kio PenguinTV-4.2.0/penguintv/EntryList.py0000644000000000000000000004146011151272467014623 0ustar import string import logging import time import gtk, gobject import ptvDB import penguintv TITLE = 0 MARKEDUPTITLE = 1 ENTRY_ID = 2 INDEX = 3 ICON = 4 FLAG = 5 FEED = 6 VISIBLE = 7 S_DEFAULT = 0 S_SEARCH = 1 #S_ACTIVE = 2 class EntryList(gobject.GObject): __gsignals__ = { 'entry-selected': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_INT, gobject.TYPE_INT])), 'search-entry-selected': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_INT, gobject.TYPE_INT, gobject.TYPE_PYOBJECT])), 'link-activated': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_PYOBJECT])), 'no-entry-selected': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, []), 'entrylist-resized': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ([gobject.TYPE_INT])) } def __init__(self, widget_tree, app, feed_list_view, main_window): gobject.GObject.__init__(self) self._widget = widget_tree.get_widget("entrylistview") self._main_window = main_window self._entrylist = gtk.ListStore(str, str, int, int, str, int, int, bool) #title, markeduptitle, entry_id, index, icon, flag, feed, visible self._app = app self._feed_id=None self._last_entry=None self._search_query = "" self._search_results = [] self._state = S_DEFAULT self._hide_viewed = False self.__populating = False self.__cancel_populating = False self.presently_selecting = False #build list view self._entry_filter = self._entrylist.filter_new() self._entry_filter.set_visible_column(VISIBLE) self._widget.set_model(self._entry_filter) icon_renderer = gtk.CellRendererPixbuf() renderer = gtk.CellRendererText() self._vadjustment = widget_tree.get_widget("entry_scrolled_window").get_vadjustment() self._hadjustment = widget_tree.get_widget("entry_scrolled_window").get_hadjustment() column = gtk.TreeViewColumn(_('Articles')) column.pack_start(icon_renderer, False) column.pack_start(renderer, True) column.set_attributes(icon_renderer, stock_id=4) column.set_attributes(renderer, markup=1) #column.set_property("sizing", gtk.TREE_VIEW_COLUMN_GROW_ONLY) #AUTOSIZE or GROW_ONLY or FIXED self._widget.append_column(column) #If you want to grow _and_ shrink, start uncommenting and switch above to autosize column = gtk.TreeViewColumn('') #column.set_property("resizable", False) #column.set_property("sizing", gtk.TREE_VIEW_COLUMN_AUTOSIZE) self._widget.append_column(column) self._widget.columns_autosize() self._widget.get_selection().set_mode(gtk.SELECTION_MULTIPLE) #signals self._widget.get_selection().connect("changed", self.item_selection_changed) self._widget.connect("row-activated", self.on_row_activated) self._widget.connect("button-press-event", self._on_button_press_event) self._handlers = [] h_id = feed_list_view.connect('feed-selected', self.__feedlist_feed_selected_cb) self._handlers.append((feed_list_view.disconnect, h_id)) h_id = feed_list_view.connect('search-feed-selected', self.__feedlist_search_feed_selected_cb) self._handlers.append((feed_list_view.disconnect, h_id)) h_id = feed_list_view.connect('no-feed-selected', self.__feedlist_none_selected_cb) self._handlers.append((feed_list_view.disconnect, h_id)) h_id = app.connect('feed-polled', self.__feed_polled_cb) self._handlers.append((app.disconnect, h_id)) h_id = app.connect('feed-removed', self.__feed_removed_cb) self._handlers.append((app.disconnect, h_id)) h_id = app.connect('entry-updated', self.__entry_updated_cb) 
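        #each (disconnector, handler_id) pair is saved so that finalize() below
        #can disconnect every signal handler when this view is torn down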
self._handlers.append((app.disconnect, h_id)) h_id = app.connect('state-changed', self.__state_changed_cb) self._handlers.append((app.disconnect, h_id)) h_id = app.connect('entries-viewed', self.__entries_viewed_cb) self._handlers.append((app.disconnect, h_id)) h_id = app.connect('entries-unviewed', self.__entries_viewed_cb) self._handlers.append((app.disconnect, h_id)) def finalize(self): for disconnector, h_id in self._handlers: disconnector(h_id) def set_entry_view(self, entry_view): h_id = entry_view.connect('entries-viewed', self.__entries_viewed_cb) self._handlers.append((entry_view.disconnect, h_id)) def __feedlist_feed_selected_cb(self, o, feed_id): self.populate_entries(feed_id) def __feedlist_search_feed_selected_cb(self, o, feed_id): for e in self._entrylist: if e[FEED] == feed_id: e[VISIBLE] = True else: e[VISIBLE] = False def __feedlist_none_selected_cb(self, o): if self._state == S_SEARCH: for e in self._entrylist: e[VISIBLE] = True else: self.populate_entries(None) def __feed_polled_cb(self, app, feed_id, update_data): feed_list = self._app.db.get_associated_feeds(feed_id) if self._feed_id in feed_list: self.update_entry_list() def __feed_removed_cb(self, app, feed_id): if feed_id == self._feed_id: self.clear_entries() def __entry_updated_cb(self, app, entry_id, feed_id): self.update_entry_list(entry_id) def __entries_viewed_cb(self, app, viewlist): for feed_id, entrylist in viewlist: feed_list = self._app.db.get_associated_feeds(feed_id) if self._feed_id in feed_list: for e in entrylist: self.update_entry_list(e) def populate_if_selected(self, feed_id): if feed_id == self._feed_id: self.populate_entries(feed_id, -1) def populate_entries(self, feed_id, selected=-1): if self._state == S_SEARCH: if len(self._search_results) > 0: if feed_id in [s[1] for s in self._search_results]: self.show_search_results(self._search_results, self._search_query) #self.highlight_results(feed_id) return if self.__populating: self.__cancel_populating = True while gtk.events_pending(): gtk.main_iteration() if feed_id == self._feed_id: dont_autopane = True else: dont_autopane = False #it's a double negative, but it makes sense to me at the moment. 
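# (illustrative sketch, not in the original source) The population below is
# driven by gobject.idle_add(populate_gen().next): the generator yields True
# every 100 rows to be rescheduled on the next idle cycle and yields False
# when finished, so a long entry list fills in without freezing the UI (the
# idle source is only used for lists longer than 300 rows).  The bare
# pattern, with hypothetical store/rows names:
#
#   def gen():
#       for i, row in enumerate(rows):
#           store.append(row)
#           if i % 100 == 99:
#               yield True       # reschedule on the next idle iteration
#       yield False              # a false return removes the idle source
#   gobject.idle_add(gen().next)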
self._feed_id = feed_id db_entrylist = self._app.db.get_entrylist(feed_id) if self._hide_viewed: unviewed = self._app.db.get_unread_entries(feed_id) db_entrylist = [e for e in db_entrylist if e[0] in unviewed] selection = self._widget.get_selection() if selected==-1: rows = selection.get_selected_rows() if len(rows[1]) > 0: item = rows[0][rows[1][-1]] try: selected=item[ENTRY_ID] index = item[INDEX] except Exception,e: print e print "rows: ",rows," item:",item self._entrylist.clear() self.emit('no-entry-selected') def populate_gen(): self.__populating = True i=-1 for entry_id,title,date,read,placeholder in db_entrylist: if self.__cancel_populating: self.__cancel_populating = False break i=i+1 flag = self._app.db.get_entry_flag(entry_id) icon = self._get_icon(flag) markeduptitle = self._get_markedup_title(title, flag) self._entrylist.append([title, markeduptitle, entry_id, i, icon, flag, feed_id, True]) if i % 100 == 99: #gtk.gdk.threads_leave() yield True if i == 25 and not dont_autopane: #ie, DO auto_pane please #this way we don't push the list way out because #of something we can't even see gobject.idle_add(self.auto_pane) #gtk.gdk.threads_leave() yield True if not self.__cancel_populating: if i<25 and not dont_autopane: #ie, DO auto_pane please gobject.idle_add(self.auto_pane) self._vadjustment.set_value(0) self._hadjustment.set_value(0) if selected>=0: index = self.find_index_of_item(selected) if index is not None: selection.select_path((index),) else: selection.unselect_all() self._widget.columns_autosize() self.__populating = False yield False if len(db_entrylist) > 300: #only use an idler when the list is getting long gobject.idle_add(populate_gen().next) else: for i in populate_gen(): if not i: #said the cat return def auto_pane(self): """Automatically adjusts the pane width to match the column width""" #If the second column exists, this cause the first column to shrink, #and then we can set the pane to the same size column = self._widget.get_column(0) new_width = column.get_width()+10 self.emit('entrylist-resized', new_width) return False def _get_icon(self, flag): """ This would be a nice place to drop in custom icons """ if flag & ptvDB.F_ERROR: return 'gtk-dialog-error' if flag & ptvDB.F_DOWNLOADING: return 'gtk-execute' if flag & ptvDB.F_DOWNLOADED: return 'gtk-harddisk' if flag & ptvDB.F_PAUSED: return 'gtk-media-pause' else: return 'gnome-stock-blank' def _get_markedup_title(self, title, flag): if flag & ptvDB.F_UNVIEWED == ptvDB.F_UNVIEWED: title=""+title+"" if flag & ptvDB.F_MEDIA == ptvDB.F_MEDIA: title=""+title+"" title = string.replace(title,"\n"," ") return title def update_entry_list(self, entry_id=None): if entry_id is None: if len(self._entrylist) != len(self._app.db.get_entrylist(self._feed_id)): self.populate_entries(self._feed_id) return for entry in self._entrylist: entry[FLAG] = self._app.db.get_entry_flag(entry[ENTRY_ID]) entry[MARKEDUPTITLE] = self._get_markedup_title(entry[TITLE],entry[FLAG]) entry[ICON] = self._get_icon(entry[FLAG]) else: try: index = self.find_index_of_item(entry_id) if index is not None: entry = self._entrylist[index] entry[FLAG] = self._app.db.get_entry_flag(entry_id) entry[MARKEDUPTITLE] = self._get_markedup_title(entry[TITLE],entry[FLAG]) entry[ICON] = self._get_icon(entry[FLAG]) else: return except: #we aren't even viewing this feed return if entry_id == self._last_entry: return True def mark_as_viewed(self, entry_id=None): index = self.find_index_of_item(entry_id) if index is not None: entry = self._entrylist[index] if entry[FLAG] & 
ptvDB.F_UNVIEWED: entry[FLAG] -= ptvDB.F_UNVIEWED entry[MARKEDUPTITLE] = self._get_markedup_title(entry[TITLE],entry[FLAG]) entry[ICON] = self._get_icon(entry[FLAG]) def show_search_results(self, entries, query): """Only show the first hundred SEARCHENGINE IS IN CHARGE OF THAT""" self._search_query = query if entries is None: entries = [] self._search_results = entries self._entrylist.clear() if len(entries) == 0: self.emit('no-entry-selected') return self._hide_viewed = False self._main_window.set_hide_entries_menuitem(self._hide_viewed) self._main_window.set_hide_entries_visibility(False) i=-1 for entry_id,title, fakedate, feed_id in entries: i+=1 try: entry = self._app.db.get_entry(entry_id) except ptvDB.NoEntry: raise ptvDB.BadSearchResults, "Entry not found, possible out of date index" flag = self._app.db.get_entry_flag(entry_id) icon = self._get_icon(flag) markeduptitle = self._get_markedup_title(entry['title'], flag) self._entrylist.append([entry['title'], markeduptitle, entry_id, i, icon, flag, feed_id, True]) self._vadjustment.set_value(0) self._hadjustment.set_value(0) self._widget.columns_autosize() gobject.idle_add(self.auto_pane) def _unset_state(self): if self._state == S_SEARCH: self._search_query = "" self._widget.get_selection().unselect_all() self._search_results = [] self._entrylist.clear() def __state_changed_cb(self, app, newstate, data=None): d = {penguintv.DEFAULT: S_DEFAULT, penguintv.MANUAL_SEARCH: S_SEARCH, penguintv.TAG_SEARCH: S_SEARCH, #penguintv.ACTIVE_DOWNLOADS: S_ACTIVE, penguintv.MAJOR_DB_OPERATION: S_DEFAULT} newstate = d[newstate] if newstate == self._state: return self._unset_state() self._state = newstate def _on_button_press_event(self, widget, event): if event.button==3: #right click self.do_context_menu(event) def item_selection_changed(self, selection): self.presently_selecting = True try: selected = self.get_selected(selection) #then do something with it except: self.presently_selecting = False return if selected is None: self.presently_selecting = False return if selected['entry_id'] != self._last_entry: self._last_entry = selected['entry_id'] #print "selected item: "+str(selected) #CONVENIENT #if self._showing_search: if selection.count_selected_rows()==1: if self._state == S_SEARCH: self.emit('search-entry-selected', selected['entry_id'], selected['feed_id'], self._search_query) else: self.emit('entry-selected', selected['entry_id'], selected['feed_id']) self.presently_selecting = False def get_selected(self, selection=None): if selection==None: s = self._widget.get_selection().get_selected_rows() else: s = selection.get_selected_rows() if s[0] is None or len(s[1])==0: return None s = s[0][s[1][-1]] selected={ 'title': s[TITLE], 'markeduptitle':s[MARKEDUPTITLE], 'entry_id': s[ENTRY_ID], 'index': s[INDEX], 'icon': s[ICON], 'flag': s[FLAG], 'feed_id': s[FEED]} return selected def get_selected_id(self, selection=None): selected = self.get_selected(selection) if selected is None: return 0 else: return selected['entry_id'] def set_selected(self, entry_id): index = self.find_index_of_item(entry_id) if index is not None: self._widget.get_selection().select_path((index,)) def clear_entries(self): self._entrylist.clear() def find_index_of_item(self, entry_id): list = [entry[ENTRY_ID] for entry in self._entrylist] try: return list.index(entry_id) except: return None def on_row_activated(self, treeview, path, view_column): index = path[0] model = treeview.get_model() item = self._app.db.get_entry(model[index][ENTRY_ID]) self.emit('link-activated', 
item['link']) def do_context_menu(self, event): """pops up a context menu for the item where the mouse is positioned""" #we can't go by the selected item, because that changes _after_ this executes #so we find out what is selected based on mouse position path = self._widget.get_path_at_pos(int(event.x),int(event.y)) if path is None: #nothing selected return index = path[0] selected={ 'title': self._entrylist[index][TITLE], 'markeduptitle':self._entrylist[index][MARKEDUPTITLE], 'entry_id': self._entrylist[index][ENTRY_ID], 'index': self._entrylist[index][INDEX], 'icon': self._entrylist[index][ICON], 'flag': self._entrylist[index][FLAG]} menu = gtk.Menu() if selected['flag'] & ptvDB.F_MEDIA: if selected['flag'] & ptvDB.F_DOWNLOADED == 0: item = gtk.ImageMenuItem(_("_Download")) img = gtk.image_new_from_stock('gtk-go-down',gtk.ICON_SIZE_MENU) item.set_image(img) item.connect('activate',self._main_window.on_download_entry_activate) menu.append(item) else: item = gtk.ImageMenuItem(_("_Re-Download")) img = gtk.image_new_from_stock('gtk-go-down',gtk.ICON_SIZE_MENU) item.set_image(img) item.connect('activate',self._main_window.on_download_entry_activate) menu.append(item) item = gtk.ImageMenuItem('gtk-media-play') item.connect('activate',self._main_window.on_play_entry_activate) menu.append(item) item = gtk.MenuItem(_("Delete")) item.connect('activate',self._main_window.on_delete_entry_media_activate) menu.append(item) if selected['flag'] & ptvDB.F_UNVIEWED: item = gtk.MenuItem(_("Mark as _Viewed")) item.connect('activate',self._main_window.on_mark_entry_as_viewed_activate) menu.append(item) else: item = gtk.MenuItem(_("Mark as _Unviewed")) item.connect('activate',self._main_window.on_mark_entry_as_unviewed_activate) menu.append(item) keep = self._app.db.get_entry_keep(selected['entry_id']) if keep: item = gtk.MenuItem(_("_Don't Keep New")) item.connect('activate',self._main_window.on_unkeep_entry_new_activate) menu.append(item) else: item = gtk.MenuItem(_("_Keep New")) item.connect('activate',self._main_window.on_keep_entry_new_activate) menu.append(item) if self._state != S_SEARCH: if self._hide_viewed: item = gtk.MenuItem(_("_Show All")) item.connect('activate', self._toggle_hide_viewed) menu.append(item) else: item = gtk.MenuItem(_("_Hide Viewed Entries")) item.connect('activate', self._toggle_hide_viewed) menu.append(item) menu.show_all() menu.popup(None,None,None, event.button,event.time) def set_hide_viewed(self, state): if state == self._hide_viewed: return self._toggle_hide_viewed() def _toggle_hide_viewed(self, e=None): if self._hide_viewed: self._hide_viewed = False else: self._hide_viewed = True self._main_window.set_hide_entries_menuitem(self._hide_viewed) self.populate_entries(self._feed_id) PenguinTV-4.2.0/penguintv/EntryFormatter.py0000644000000000000000000004150111421732304015636 0ustar import os, os.path import htmllib, HTMLParser, formatter import time import logging import traceback import gtk from ptvDB import D_NOT_DOWNLOADED, D_DOWNLOADING, D_DOWNLOADED, D_RESUMABLE, \ D_ERROR, D_WARNING import Downloader import utils GTKHTML=0 MOZILLA=1 class EntryFormatter: def __init__(self, mm=None, with_feed_titles=False, indicate_new=False, basic_progress=False, ajax_url=None, renderer=MOZILLA): self._mm = mm self._with_feed_titles = with_feed_titles self._indicate_new = indicate_new self._basic_progress = basic_progress self._ajax_url = ajax_url self._renderer = renderer def htmlify_item(self, item, convert_newlines=False): """ Take an item as returned from ptvDB and turn it into an 
HTML page. Very messy at times, but there are lots of alternate designs depending on the status of media. """ #global download_status ret = [] #ret.append('
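# (illustrative note, not in the original source) htmlify_item() collects
# markup fragments in a list and finally returns "\n".join(ret); appending
# to a list and joining once avoids repeated string concatenation.  The
# skeleton of the approach, with hypothetical markup:
#
#   ret = []
#   ret.append('<div class="entry">')
#   ret.append(item['title'])
#   ret.append('</div>')
#   return "\n".join(ret)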
    ') if self._indicate_new: if item['new']: javascript = "" if self._renderer == MOZILLA: javascript = """oncontextmenu="javascript:parent.location='rightclick:%i'" """ % item['entry_id'] ret.append("""
    """ % javascript) else: javascript = "" if self._renderer == MOZILLA: javascript = """oncontextmenu="javascript:parent.location='rightclick:%i'" """ % item['entry_id'] ret.append("""
    """ % javascript) else: javascript = "" if self._renderer == MOZILLA: javascript = """oncontextmenu="javascript:parent.location='rightclick:%i'" """ % item['entry_id'] ret.append("""
    """ % javascript) ret.append('''
    ''') if self._with_feed_titles: if item.has_key('title') and item.has_key('feed_title'): ret.append('
%s: %s
    ' % (item['feed_title'],item['title'])) else: if item.has_key('title'): if self._indicate_new and item['new']: if utils.RUNNING_HILDON: indicator="*" else: indicator="✴" ret.append('
    %s%s
    ' % (item['entry_id'],indicator,item['title'])) else: ret.append('
    %s
    ' % (item['entry_id'],item['title'])) ret.append('
    ') if not utils.RUNNING_SUGAR: if self._renderer == MOZILLA: cb_status = item['keep'] and "CHECKED" or "UNCHECKED" cb_function = item['keep'] and "unkeep" or "keep" ret.append('''
    %s
    ''' % (cb_function, item['entry_id'], cb_status, cb_function, item['entry_id'], _('Keep New'))) elif self._renderer == GTKHTML: cb_function = item['keep'] and "unkeep" or "keep" if not item['keep']: link_name = _('Keep New') else: link_name = _("Don't Keep New") ret.append('''%s''' % (cb_function, item['entry_id'], link_name)) ret.append('
    ') if item.has_key('creator'): if item['creator']!="" and item['creator'] is not None: ret.append('By %s
    ' % (item['creator'],)) if item['date'] != (0,0,0,0,0,0,0,0,0): ret.append('
    %s
    ' % time.strftime('%a %b %d, %Y %X',time.localtime(item['date']))) #ret.append('
    ') if item.has_key('media'): for medium in item['media']: ret += self.htmlify_media(medium) ret.append('
    ') if item.has_key('description'): if convert_newlines: ret.append('%s' % item['description'].replace('\n', '
    ')) else: ret.append('%s' % item['description']) ret.append('
    ') if item.has_key('link'): ret.append('' + _("Open in Browser...") + '') ret.append('

    ') #print "\n".join(ret) return "\n".join(ret) def htmlify_media(self, medium): ret = [] ret.append('
    ') if medium['download_status']==D_NOT_DOWNLOADED: ret.append('''
    ''') ret.append(self._html_command('download:',medium['media_id']) + "") ret.append(self._html_command('downloadqueue:',medium['media_id']) + "") ret.append('(%s)

    ' % (utils.format_size(medium['size'],)) + "
    ") elif medium['download_status'] == D_DOWNLOADING: if self._basic_progress: if self._ajax_url is None: ret.append('') else: ret.append('') ret.append(''+_('Downloading %s...') % utils.format_size(medium['size'])+' '+self._html_command('pause:',medium['media_id'])+' '+self._html_command('stop:',medium['media_id'])) elif medium.has_key('progress_message'): #downloading and we have a custom message if self._ajax_url is None: ret.append('') else: ret.append('') ret.append('

    '+medium['progress_message']+'

    ') ret.append('''
    ''') ret.append(self._html_command('pause:',medium['media_id']) + "") ret.append(self._html_command('stop:',medium['media_id'])+"
    ") elif self._mm.has_downloader(medium['media_id']): #we have a downloader object downloader = self._mm.get_downloader(medium['media_id']) if downloader.status == Downloader.DOWNLOADING: d = {'progress':downloader.progress, 'size':utils.format_size(medium['size'])} #ret.append('

    '+_("Downloaded %(progress)d%% of %(size)s") % d +' '+ ret.append('''''') ret.append('''
    ''') if self._ajax_url is None: ret.append('') else: ret.append('') ret.append("") ret.append(self._html_progress_bar(d['progress'], d['size']) + "") ret.append(self._html_command('pause:',medium['media_id']) + "") ret.append(self._html_command('stop:',medium['media_id'])+"
    ") elif downloader.status == Downloader.QUEUED: ret.append('

    '+_("Download queued") +'

    ') ret.append('''
    ''') ret.append(self._html_command('pause:',medium['media_id']) + "") ret.append(self._html_command('stop:',medium['media_id'])+"
    ") elif medium.has_key('progress'): #no custom message, but we have a progress value d = {'progress':medium['progress'], 'size':utils.format_size(medium['size'])} #ret.append('

    '+_("Downloaded %(progress)d%% of %(size)s") % d +' '+ ret.append('''''') ret.append('''
    ''') if self._ajax_url is None: ret.append('') else: ret.append('') ret.append("") ret.append(self._html_progress_bar(d['progress'], d['size']) + "") ret.append(self._html_command('pause:',medium['media_id']) + "") ret.append(self._html_command('stop:',medium['media_id'])+"
    ") else: # we have nothing to go on ret.append('

    '+_('Downloading %s...') % utils.format_size(medium['size'])+'

    ') ret.append('''
    ''') ret.append(self._html_command('pause:',medium['media_id']) + "") ret.append(self._html_command('stop:',medium['media_id'])+"
    ") elif medium['download_status'] == D_DOWNLOADED: if self._mm.has_downloader(medium['media_id']): downloader = self._mm.get_downloader(medium['media_id']) ret.append('

    '+ str(downloader.message)+'

    ') filename = medium['file'][medium['file'].rfind("/")+1:] if utils.is_known_media(medium['file']): #we have a handler if os.path.isdir(medium['file']) and medium['file'][-1]!='/': medium['file']=medium['file']+'/' ret.append('''") ret.append('
    ''') ret.append(self._html_command('play:',medium['media_id']) + "") ret.append(self._html_command('redownload',medium['media_id']) + "") ret.append(self._html_command('delete:',medium['media_id'])+"
    (%s: %s)
    ' % (medium['file'], filename, utils.format_size(medium['size']))) elif os.path.isdir(medium['file']): #it's a folder ret.append('''
    ''') ret.append(self._html_command('file://',medium['file']) + "") ret.append(self._html_command('redownload',medium['media_id']) + "") ret.append(self._html_command('delete:',medium['media_id'])+"
    ") else: #we have no idea what this is ret.append('''") ret.append('
    ''') ret.append(self._html_command('file://',medium['file']) + "") ret.append(self._html_command('redownload',medium['media_id']) + "") ret.append(self._html_command('delete:',medium['media_id'])+"
    (%s: %s)
    ' % (medium['file'], filename, utils.format_size(medium['size']))) elif medium['download_status'] == D_RESUMABLE: ret.append('''
    ''') ret.append(self._html_command('resume:',medium['media_id']) + "") ret.append(self._html_command('redownload',medium['media_id']) + "") ret.append(self._html_command('delete:',medium['media_id'])+"
    ") ret.append('(%s)
    ' % (utils.format_size(medium['size']),)) elif medium['download_status'] == D_ERROR: if len(medium['errormsg']) > 0: error_msg = medium['errormsg'] else: error_msg = _("There was an error downloading the file.") ret.append('''
    ''') ret.append(medium['url'][medium['url'].rfind('/')+1:]+': '+str(error_msg) + "") ret.append(self._html_command('retry',medium['media_id']) + "") ret.append(self._html_command('tryresume:',medium['media_id']) + "") ret.append(self._html_command('cancel:',medium['media_id'])+"
    ") ret.append('(%s)
    ' % (utils.format_size(medium['size']),)) ret.append('
    ') return ret commands={ 'play:': (_("Open with PenguinTV"), "gtk-media-play-ltr"), #FIXME: detect rtl ltr i18n 'download:': (_("Download"), "gtk-go-down"), 'downloadqueue:': (_("Download And Open"), "gtk-go-down"), 'pause:': (_("Pause"), "gtk-media-pause"), 'cancel:': (_("Cancel"), "gtk-cancel"), 'file://': (_("Open File"), "gtk-open"), 'delete:': (_("Delete"), "gtk-delete"), 'resume:': (_("Resume"), "gtk-go-down"), 'clear:': (_("Cancel"), "gtk-cancel"), 'stop:': (_("Stop"), "gtk-stop"), 'tryresume:': (("Try Resume"), "gtk-go-down")} if not utils.HAS_GSTREAMER: commands['play:'] = (_("Open"), "gtk-media-play-ltr") def _html_command(self, command,arg): """returns something like 'Open' for all the commands I have. Dictionary has keys of commands, and returns located strings. If ajax_url is given, return the icon name appended to it. Otherwise return a file:/// version""" theme = gtk.icon_theme_get_default() def _get_img_html(icon_name, ajax_url=None): if self._ajax_url is not None: return '' elif not utils.RUNNING_HILDON: iconinfo = theme.lookup_icon(icon_name, 16, gtk.ICON_LOOKUP_NO_SVG) icon_markup = "" if iconinfo is not None: icon_filename = iconinfo.get_filename() return '' return "" else: return "" #a couple special cases if command == "redownload": return ' '+_get_img_html(self.commands['download:'][1], self._ajax_url)+_("Re-Download")+"" if command == "retry": return ' '+_get_img_html(self.commands['download:'][1], self._ajax_url)+_("Retry")+'' return ' '+_get_img_html(self.commands[command][1], self._ajax_url)+self.commands[command][0]+'' def _html_progress_bar(self, percent, size): ret = [] width = 200 height = 15 bar_color = "#333333" ret.append('''
    ''') ret.append("""
    """ % (height, width)) #ret.append("""
    %i%%""" % (height, height * 0.8, percent)) ret.append("""
    """ % (bar_color, height, percent * (width / 100))) ret.append("""
    """) ret.append(""" (%s)""" % (size,)) ret.append("""
    """) return "\n".join(ret) #class HTMLimgParser(htmllib.HTMLParser): # def __init__(self): # htmllib.HTMLParser.__init__(self, formatter.NullFormatter()) # self.images=[] # # def do_img(self, attributes): # for name, value in attributes: # if name == 'src': # new_image = value # self.images.append(new_image) class HTMLHighlightParser(HTMLParser.HTMLParser): def __init__(self, highlight_terms): HTMLParser.HTMLParser.__init__(self) highlight_terms = highlight_terms.replace('"','') highlight_terms = highlight_terms.replace("'",'') self.terms = [a.upper() for a in highlight_terms.split() if len(a)>2 and a.upper() not in ['AND','NOT']] self.new_data = "" self.style_start="""""" self.style_end ="" self.tag_stack = [] def handle_starttag(self, tag, attrs): if len(attrs)>0: self.new_data+="<"+str(tag)+" "+" ".join([i[0]+"=\""+i[1]+"\"" for i in attrs])+">" else: self.new_data+="<"+str(tag)+">" self.tag_stack.append(tag) def handle_startendtag(self, tag, attrs): if len(attrs)>0: self.new_data+="<"+str(tag)+" "+" ".join([i[0]+"=\""+i[1]+"\"" for i in attrs])+"/>" else: self.new_data+="<"+str(tag)+"/>" self.tag_stack.pop(-1) def handle_endtag(self, tag): self.new_data+="" def handle_data(self, data): data_u = data.upper() if self.tag_stack[-1] != "style": for term in self.terms: l = len(term) place = 0 while place != -1: #we will never match on the replacement style because the replacement is all #lowercase and the terms are all uppercase place = data_u.find(term, place) if place == -1: break data = data [:place] + self.style_start + data [place:place+l] + self.style_end + data [place+l:] data_u = data_u[:place] + self.style_start + data_u[place:place+l] + self.style_end + data_u[place+l:] place+=len(self.style_start)+len(term)+len(self.style_end) self.new_data+=data class HTMLImgAuthParser(HTMLParser.HTMLParser): def __init__(self, domain, userpass): HTMLParser.HTMLParser.__init__(self) self._domain = domain self._userpass = userpass self.new_data = "" def handle_starttag(self, tag, attrs): new_attrs = [] if tag.upper() != "A": for a in attrs: if a[1] is not None: attr = (a[0], a[1].replace(self._domain, self._userpass+"@"+self._domain)) else: attr = (a[0], "") new_attrs.append(attr) attrs = new_attrs else: pass# "not doing link tag" if len(attrs)>0: self.new_data+="<"+str(tag)+" "+" ".join([i[0]+"=\""+i[1]+"\"" for i in attrs])+">" else: self.new_data+="<"+str(tag)+">" def handle_endtag(self, tag): self.new_data+="" def handle_startendtag(self, tag, attrs): new_attrs = [] for a in attrs: if a[1] is not None: attr = (a[0], a[1].replace(self._domain, self._userpass+"@"+self._domain)) else: attr = (a[0], "") new_attrs.append(attr) attrs = new_attrs if len(attrs)>0: self.new_data+="<"+str(tag)+" "+" ".join([i[0]+"=\""+i[1]+"\"" for i in attrs])+">" else: self.new_data+="<"+str(tag)+">" def handle_data(self, data): self.new_data+=data PenguinTV-4.2.0/penguintv/FilterSelectorDialog.py0000644000000000000000000002143111072521274016723 0ustar #thanks to http://www.daa.com.au/pipermail/pygtk/2003-November/006304.html #for the reordering code import gtk import pango from ptvDB import T_BUILTIN import utils F_NAME = 0 F_DISPLAY = 1 F_INDEX = 2 F_SEP = 3 class FilterSelectorDialog(gtk.Dialog): def __init__(self, xml, main_window): gtk.Dialog.__init__(self) self._xml = xml self._main_window = main_window #self._widget = self._xml.get_widget('dialog_tag_favorites') contents = xml.get_widget("dialog-vbox3") p = contents.get_parent() contents.unparent() self.vbox.add(contents) gtk.Dialog.set_title(self, 
p.get_title()) del p self._pane = self._xml.get_widget('hpaned') self._favorites_old_order = [] self._favorites_treeview = self._xml.get_widget('favorites_treeview') self._favorites_model = gtk.ListStore(str, #name of filter str, #text to display int, #original id bool) #separator self._favorites_treeview.set_model(self._favorites_model) column = gtk.TreeViewColumn(_('Favorites')) renderer = gtk.CellRendererText() column.pack_start(renderer) column.set_attributes(renderer, text=F_DISPLAY) column.set_alignment(0.5) self._favorites_treeview.append_column(column) self._all_tags_model = gtk.ListStore(str, str, int, bool) #same as above self._all_tags_treeview = self._xml.get_widget('all_tags_treeview') self._all_tags_treeview.set_model(self._all_tags_model) self._all_tags_treeview.set_row_separator_func(lambda model,iter:model[iter][F_SEP]==True) column = gtk.TreeViewColumn(_('All Tags')) renderer = gtk.CellRendererText() column.pack_start(renderer) column.set_attributes(renderer, text=F_DISPLAY) column.set_alignment(0.5) self._all_tags_treeview.append_column(column) self._TARGET_TYPE_INTEGER = 80 self._TARGET_TYPE_REORDER = 81 drop_types = [ ('reorder',gtk.TARGET_SAME_WIDGET,self._TARGET_TYPE_REORDER), ('integer',gtk.TARGET_SAME_APP,self._TARGET_TYPE_INTEGER)] #for removing items from favorites and reordering self._favorites_treeview.enable_model_drag_source(gtk.gdk.BUTTON1_MASK, drop_types, gtk.gdk.ACTION_MOVE) self._all_tags_treeview.drag_dest_set(gtk.DEST_DEFAULT_ALL, drop_types, gtk.gdk.ACTION_MOVE) #copying items to favorites self._favorites_treeview.enable_model_drag_dest(drop_types, gtk.gdk.ACTION_COPY) self._all_tags_treeview.drag_source_set(gtk.gdk.BUTTON1_MASK, drop_types, gtk.gdk.ACTION_COPY) self._dragging = False for key in dir(self.__class__): #python insaneness if key[:3] == '_on': self._xml.signal_connect(key, getattr(self, key)) self.connect('delete-event', self._on_delete_event) self._pane_position = 0 def set_taglists(self, all_tags, favorite_tags): self._all_tags_model.clear() self._favorites_model.clear() self._favorites_old_order = [] last_type = all_tags[0][3] i=-1 for favorite, name,display,f_type in all_tags: i+=1 if f_type != T_BUILTIN: if f_type != last_type: last_type = f_type self._all_tags_model.append(['---','---', -1, True]) self._all_tags_model.append([name, display, i, False]) if favorite > 0: self._favorites_old_order.append([favorite, name, display, i]) self._favorites_old_order.sort() for fav, name, display, index in self._favorites_old_order: self._favorites_model.append([name, display, index, False]) def is_visible(self): return self.get_property('visible') def Show(self): if utils.RUNNING_HILDON: self._all_tags_treeview.set_property('height-request', 150) self.resize(650,300) context = self.create_pango_context() style = self.get_style().copy() font_desc = style.font_desc metrics = context.get_metrics(font_desc, None) char_width = metrics.get_approximate_char_width() widest_left = 15 widest_right = 0 for row in self._all_tags_model: width = len(row[F_DISPLAY]) if width > widest_right: widest_right = width for row in self._favorites_model: width = len(row[F_DISPLAY]) if width > widest_left: widest_left = width self._pane_position = pango.PIXELS((widest_left+10)*char_width) self._window_width = pango.PIXELS((widest_left+widest_right+10)*char_width)+100 if not utils.RUNNING_HILDON: self.resize(self._window_width,1) self._pane.set_position(self._pane_position) self._favorites_treeview.columns_autosize() self._all_tags_treeview.columns_autosize() 
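# (illustrative note, not in the original source) The sizing above turns a
# character count into pixels through the widget's font metrics:
#
#   context = widget.create_pango_context()
#   metrics = context.get_metrics(widget.get_style().font_desc, None)
#   px = pango.PIXELS(n_chars * metrics.get_approximate_char_width())
#
# get_approximate_char_width() returns Pango units (1024 units per device
# pixel), hence the pango.PIXELS() conversion before positioning the pane.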
self.set_transient_for(self._main_window.get_parent()) self.show_all() if utils.RUNNING_HILDON: self._pane_position = 250 self._pane.set_position(self._pane_position) def Hide(self): self._do_unselect() self.hide() def _on_apply_clicked(self, button): new_order = [r[0] for r in self._favorites_model] old_order = [r[1] for r in self._favorites_old_order] if old_order != new_order: self._main_window.set_tag_favorites(new_order) self.Hide() def _on_close_clicked(self, button): self.Hide() #self.destroy() def _on_help_button_activate(self, event): dialog = gtk.Dialog(title=_("Tag Favorites Help"), parent=None, flags=gtk.DIALOG_MODAL, buttons=(gtk.STOCK_OK, gtk.RESPONSE_ACCEPT)) hbox = gtk.HBox() hbox.set_spacing(12) image = gtk.image_new_from_stock(gtk.STOCK_DIALOG_INFO, gtk.ICON_SIZE_DIALOG) hbox.pack_start(image, False, False, 12) label = gtk.Label(_("""You can drag tags from the righthand side to the favorites list on the left. To remove a favorite, drag it from the lefthand side back to the right.""")) label.set_line_wrap(True) hbox.pack_start(label, True, True, 0) dialog.vbox.pack_start(hbox, True, True, 0) dialog.show_all() dialog.resize(400,-1) response = dialog.run() dialog.hide() del dialog def _on_delete_event(self, widget, event): return self.hide_on_delete() def _on_drag_data_get(self, treeview, drag_context, selection_data, info, time): selection = treeview.get_selection() model, iter = selection.get_selected() path = model.get_path(iter) selection_data.set(selection_data.target, 8, str(path[0])) def _on_all_tags_drag_data_received(self, treeview, context, x, y, selection, targetType, time): treeview.emit_stop_by_name('drag-data-received') if targetType == self._TARGET_TYPE_INTEGER: tag_index = "" for c in selection.data: if c != "\0": #for some reason ever other character is a null. what gives? tag_index = tag_index+c index = int(tag_index) target_iter = self._favorites_model.get_iter((index,)) self._favorites_model.remove(target_iter) self._on_drag_end(None, None) def _on_favorites_drag_data_received(self, treeview, context, x, y, selection, targetType, time): treeview.emit_stop_by_name('drag-data-received') if targetType == self._TARGET_TYPE_INTEGER: tag_index = "" for c in selection.data: if c != "\0": #for some reason ever other character is a null. what gives? 
tag_index = tag_index+c index = int(tag_index) source_row = self._all_tags_model[index] for row in self._favorites_model: if source_row[F_NAME] == row[F_NAME]: return new_row = [source_row[F_NAME],source_row[F_DISPLAY],index, False] try: path, pos = treeview.get_dest_row_at_pos(x, y) dest_iter = self._favorites_model.get_iter(path) self.iterCopy(self._favorites_model, dest_iter, new_row, pos) except: self._favorites_model.append(new_row) if targetType == self._TARGET_TYPE_REORDER: model, iter_to_copy = treeview.get_selection().get_selected() row = list(model[iter_to_copy]) try: path, pos = treeview.get_dest_row_at_pos(x, y) target_iter = model.get_iter(path) if self.checkSanity(model, iter_to_copy, target_iter): self.iterCopy(model, target_iter, row, pos) context.finish(True, True, time) else: context.finish(False, False, time) except: model.append(row) context.finish(True, True, time) self._on_drag_end(None, None) def checkSanity(self, model, iter_to_copy, target_iter): path_of_iter_to_copy = model.get_path(iter_to_copy) path_of_target_iter = model.get_path(target_iter) if path_of_target_iter[0:len(path_of_iter_to_copy)] == path_of_iter_to_copy: return False else: return True def iterCopy(self, target_model, target_iter, row, pos): if (pos == gtk.TREE_VIEW_DROP_INTO_OR_BEFORE) or (pos == gtk.TREE_VIEW_DROP_INTO_OR_AFTER): new_iter = target_model.append(row) elif pos == gtk.TREE_VIEW_DROP_BEFORE: new_iter = target_model.insert_before(target_iter, row) elif pos == gtk.TREE_VIEW_DROP_AFTER: new_iter = target_model.insert_after(target_iter, row) def _on_drag_begin(self, widget, drag_context): self._dragging = True def _on_drag_end(self, widget, drag_context): self._dragging = False self._do_unselect() def _do_unselect(self): self._all_tags_treeview.get_selection().unselect_all() self._favorites_treeview.get_selection().unselect_all() PenguinTV-4.2.0/penguintv/DownloadView.py0000644000000000000000000002515411300011475015254 0ustar #a view that shows the current downloading media, and any unplayed media (For now) import gtk import time, os, glob import traceback import logging try: set except: from sets import Set as set import IconManager import MediaManager import ptvDB import utils if utils.RUNNING_HILDON: import hildon from Downloader import PAUSED, STOPPED, QUEUED D_MEDIA_ID = 0 D_DESCRIPTION = 1 D_DESCRIPTION_MARKUP = 2 D_PROGRESS = 3 D_SIZE = 4 D_SIZE_MARKUP = 5 D_PIXBUF = 6 D_STATUS = 7 D_STATUS_MARKUP = 8 MAX_WIDTH = 48 MAX_HEIGHT = 48 MIN_SIZE = 24 class DownloadView: def __init__(self, app, mm, db, gladefile): self._app = app self._mm = mm self._icon_manager = IconManager.IconManager(self._app.db.home) self._gladefile = gladefile self._downloads = [] self._unplayed_media = [] #contains media id self._downloads_liststore = gtk.ListStore(int, #media_id str, #description str, #description_markup int, #progress str, #size str, #size markup gtk.gdk.Pixbuf, #icon int, #status str) #status markup self.Show() #self.update_unplayed_media() def Show(self): widget_tree = gtk.glade.XML(self._gladefile, 'download_view','penguintv') for key in dir(self.__class__): #python insaneness if key[:3] == 'on_': widget_tree.signal_connect(key, getattr(self, key)) if utils.RUNNING_SUGAR: widget_tree.get_widget('stop_toolbutton').set_stock_id(None) widget_tree.get_widget('stop_toolbutton').set_icon_name('stock-close') widget_tree.get_widget('pause_toolbutton').set_stock_id(None) widget_tree.get_widget('pause_toolbutton').set_icon_name('stock-media-pause') 
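# (illustrative note, not in the original source) The dir() loop above wires
# glade signal handlers by reflection: every method whose name starts with
# 'on_' is registered with signal_connect() under its own name, so adding a
# handler is just a matter of naming the method to match the handler
# declared in the glade file.  Since dir() also walks inherited attributes,
# subclasses get their handlers connected for free.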
widget_tree.get_widget('resume_toolbutton').set_stock_id(None) widget_tree.get_widget('resume_toolbutton').set_icon_name('stock-go-down') if utils.RUNNING_HILDON: sw = widget_tree.get_widget('d_v_scrolledwindow') hildon.hildon_helper_set_thumb_scrollbar(sw, True) self._widget = widget_tree.get_widget('download_view') self._resume_button = widget_tree.get_widget('resume_toolbutton') self._resume_button.set_sensitive(False) self._downloads_listview = widget_tree.get_widget('download_list') try: self._downloads_listview.set_rubber_banding(True) except: pass #not everyone can do this selection = self._downloads_listview.get_selection() selection.set_mode(gtk.SELECTION_MULTIPLE) selection.connect("changed", self._on_selection_changed) column = gtk.TreeViewColumn(_('')) column.set_resizable(True) renderer = gtk.CellRendererPixbuf() column.pack_start(renderer, True) column.set_attributes(renderer, pixbuf=D_PIXBUF) self._downloads_listview.append_column(column) column = gtk.TreeViewColumn(_('Progress')) column.set_resizable(True) renderer = gtk.CellRendererProgress() column.pack_start(renderer, True) column.set_attributes(renderer, value=D_PROGRESS) self._downloads_listview.append_column(column) column = gtk.TreeViewColumn(_('Description')) column.set_resizable(True) renderer = gtk.CellRendererText() column.pack_start(renderer, True) column.set_attributes(renderer, markup=D_DESCRIPTION_MARKUP) self._downloads_listview.append_column(column) column = gtk.TreeViewColumn(_('Size')) column.set_resizable(True) renderer = gtk.CellRendererText() column.pack_start(renderer, True) column.set_attributes(renderer, markup=D_SIZE_MARKUP) self._downloads_listview.append_column(column) column = gtk.TreeViewColumn(_('Status')) column.set_resizable(True) renderer = gtk.CellRendererText() column.pack_start(renderer, True) column.set_attributes(renderer, markup=D_STATUS_MARKUP) self._downloads_listview.append_column(column) self._downloads_listview.columns_autosize() self._downloads_listview.set_model(self._downloads_liststore) self._widget.show_all() def get_widget(self): return self._widget def update_downloads(self): """gets called a lot (once for every progress callback) so be quick""" self._downloads = self._mm.get_download_list() current_list = [item.media['media_id'] for item in self._downloads] viewing_list = [item[D_MEDIA_ID] for item in self._downloads_liststore] oldset = set(viewing_list) newset = set(current_list) removed = list(oldset.difference(newset)) added = list(newset.difference(oldset)) unchanged = list(oldset.intersection(newset)) #slower but works better, because the list is changing all over the place for item in removed: i=-1 for row in self._downloads_liststore: i+=1 if row[0] == item: self._downloads_liststore.remove(self._downloads_liststore.get_iter((i,))) break tree,selected = self._downloads_listview.get_selection().get_selected_rows() selected = [i[0] for i in selected] i=-1 for item in self._downloads_liststore: i+=1 if item[D_MEDIA_ID] in unchanged: index = current_list.index(item[D_MEDIA_ID]) medium = self._downloads[index] iter = self._downloads_liststore[i] iter[D_PROGRESS] = medium.progress iter[D_SIZE] = utils.format_size(medium.total_size) #iter[D_STATUS] refers to the old status if medium.status == PAUSED or medium.status == QUEUED: if iter[D_STATUS] != medium.status: if i in selected: iter[D_DESCRIPTION_MARKUP] = ''+utils.my_quote(iter[D_DESCRIPTION])+'' iter[D_SIZE_MARKUP]= ''+iter[D_SIZE]+'' if medium.status == PAUSED: iter[D_STATUS_MARKUP] = ''+_("Paused")+'' elif 
medium.status == QUEUED: iter[D_STATUS_MARKUP] = ''+_("Queued")+'' else: iter[D_DESCRIPTION_MARKUP] = ''+utils.my_quote(iter[D_DESCRIPTION])+'' iter[D_SIZE_MARKUP] = ''+iter[D_SIZE]+'' if medium.status == PAUSED: iter[D_STATUS_MARKUP] = ''+_("Paused")+'' elif medium.status == QUEUED: iter[D_STATUS_MARKUP] = ''+_("Queued")+'' iter[D_STATUS] = medium.status else: #if iter[D_STATUS] == PAUSED or i in selected: iter[D_DESCRIPTION_MARKUP] = utils.my_quote(iter[D_DESCRIPTION]) iter[D_SIZE_MARKUP]= iter[D_SIZE] iter[D_STATUS] = medium.status iter[D_STATUS_MARKUP] = "" #check resume button sensitivity resume_sens = False i=-1 for item in self._downloads_liststore: i+=1 if item[D_STATUS] == PAUSED or item[D_STATUS] == QUEUED: if i in selected: resume_sens = True break self._resume_button.set_sensitive(resume_sens) for media_id in added: item = self._downloads[current_list.index(media_id)] try: entry = self._app.db.get_entry(item.media['entry_id']) description = self._app.db.get_feed_title(entry['feed_id']) + " " + utils.get_hyphen() + " " + entry['title'] size = utils.format_size(item.total_size) except: logging.warning("trouble getting entry updating downloads: %s" % str(item)) continue if item.status == PAUSED: description_markup = ''+utils.my_quote(description)+'' size_markup = ''+size+'' status_markup = ''+_("Paused")+'' elif item.status == QUEUED: description_markup = ''+utils.my_quote(description)+'' size_markup = ''+size+'' status_markup = ''+_("Queued")+'' else: description_markup = utils.my_quote(description) size_markup = size status_markup = "" pixbuf = self._icon_manager.get_icon_pixbuf(entry['feed_id'], MAX_WIDTH, MAX_HEIGHT, MIN_SIZE, MIN_SIZE) self._downloads_liststore.append([media_id, description, description_markup, item.progress, size, size_markup, pixbuf, item.status, status_markup]) #make sure both lists are sorted the same way id_list = [row[D_MEDIA_ID] for row in self._downloads_liststore] self._downloads.sort(lambda x,y: id_list.index(x.media['media_id']) - id_list.index(y.media['media_id'])) def on_stop_toolbutton_clicked(self, widget): selection = self._downloads_listview.get_selection() tree,selected = selection.get_selected_rows() medialist = [] for index in selected: #build a list to avoid race conditions medialist.append((self._downloads[index[0]].status, self._downloads[index[0]].media)) #start by getting rid of queued, stopped, end with active downloads medialist.sort() for status, medium in medialist: print "stopping",medium['url'] self._app.do_cancel_download(medium) selection.unselect_all() def on_pause_toolbutton_clicked(self, widget): selection = self._downloads_listview.get_selection() tree,selected = selection.get_selected_rows() for index in selected: self._app.do_pause_download(self._downloads_liststore[index[0]][D_MEDIA_ID]) def on_resume_toolbutton_clicked(self, widget): selection = self._downloads_listview.get_selection() tree,selected = selection.get_selected_rows() for index in selected: self._app.do_resume_download(self._downloads_liststore[index[0]][D_MEDIA_ID]) def on_download_list_row_activated(self, treeview, path, viewcolumn): d = self._downloads[path[0]] self._app.select_entry(d.media['entry_id']) def _on_selection_changed(self, selection): tree,selected = selection.get_selected_rows() selected = [i[0] for i in selected] resume_sens = False i=-1 for item in self._downloads_liststore: i+=1 if item[D_STATUS] == PAUSED or item[D_STATUS] == QUEUED: if i in selected: item[D_DESCRIPTION_MARKUP] = ''+utils.my_quote(item[D_DESCRIPTION])+'' 
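# (illustrative sketch, not in the original source) update_downloads()
# above reconciles the visible ListStore with the live download list using
# set algebra, so each progress callback only touches rows that changed:
#
#   oldset = set(viewing_list)                # media ids displayed now
#   newset = set(current_list)                # media ids downloading now
#   removed   = oldset.difference(newset)     # rows to drop
#   added     = newset.difference(oldset)     # rows to append
#   unchanged = oldset.intersection(newset)   # rows to update in place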
item[D_SIZE_MARKUP]= ''+item[D_SIZE]+'' if item[D_STATUS] == PAUSED: item[D_STATUS_MARKUP] = ''+_("Paused")+'' resume_sens = True elif item[D_STATUS] == QUEUED: item[D_STATUS_MARKUP] = ''+_("Queued")+'' else: item[D_DESCRIPTION_MARKUP] = ''+utils.my_quote(item[D_DESCRIPTION])+'' item[D_SIZE_MARKUP]= ''+item[D_SIZE]+'' if item[D_STATUS] == PAUSED: item[D_STATUS_MARKUP] = ''+_("Paused")+'' elif item[D_STATUS] == QUEUED: item[D_STATUS_MARKUP] = ''+_("Queued")+'' else: item[D_DESCRIPTION_MARKUP] = utils.my_quote(item[D_DESCRIPTION]) item[D_SIZE_MARKUP] = item[D_SIZE] item[D_STATUS_MARKUP] = "" self._resume_button.set_sensitive(resume_sens) PenguinTV-4.2.0/penguintv/PreferencesDialog.py0000644000000000000000000003367411314523331016245 0ustar # Written by Owen Williams # see LICENSE for license information import gtk import gobject import logging import penguintv import ptvDB import utils if utils.RUNNING_HILDON: import hildon class PreferencesDialog: def __init__(self,xml,app): self.xml = xml self._app = app self._article_sync = None self._window = xml.get_widget("window_preferences") self._window.set_transient_for(self._app.main_window.get_parent()) for key in dir(self.__class__): if key[:3] == 'on_': self.xml.signal_connect(key, getattr(self,key)) #init values self.feed_refresh_widget = self.xml.get_widget("feed_refresh") self.radio_refresh_spec = self.xml.get_widget("refresh_specified") self.radio_refresh_spec.connect("toggled", self.select_refresh, penguintv.REFRESH_SPECIFIED) self.radio_refresh_auto = self.xml.get_widget("refresh_auto") self.radio_refresh_auto.connect("toggled", self.select_refresh, penguintv.REFRESH_AUTO) self.radio_refresh_never = self.xml.get_widget("refresh_never") self.radio_refresh_never.connect("toggled", self.select_refresh, penguintv.REFRESH_NEVER) self.min_port_widget = self.xml.get_widget("min_port_entry") self.max_port_widget = self.xml.get_widget("max_port_entry") self.ul_limit_widget = self.xml.get_widget("upload_limit_entry") self.autoresume = self.xml.get_widget("auto_resume") self.poll_on_startup = self.xml.get_widget("poll_on_startup") self.show_notification_always = self.xml.get_widget("show_notification_always") self.auto_download_widget = self.xml.get_widget("auto_download") self.auto_download_limiter_widget = self.xml.get_widget("auto_download_limiter") self.auto_download_limit_widget = self.xml.get_widget("auto_download_limit") self.limiter_hbox_widget = self.xml.get_widget("limiter_hbox") self.cache_images_widget = self.xml.get_widget("cache_images") if utils.RUNNING_HILDON: self._hildon_chooser_button = gtk.Button("") self._hildon_chooser_button.connect('clicked', self.hildon_choose_folder) container = self.xml.get_widget("media_storage_container") old_chooser = self.xml.get_widget("media_storage_chooser") container.remove(old_chooser) container.add(self._hildon_chooser_button) del old_chooser model = gtk.ListStore(str) combo = self.xml.get_widget("sync_protocol_combo") combo.set_model(model) renderer = gtk.CellRendererText() combo.pack_start(renderer) combo.add_attribute(renderer, 'text', 0) def show(self): if utils.RUNNING_HILDON: self._window.resize(650,300) self._window.show_all() elif utils.RUNNING_SUGAR: self.auto_download_limiter_widget.hide() self.auto_download_limit_widget.hide() self.limiter_hbox_widget.hide() self.show_notification_always.hide() self.xml.get_widget("button_close").hide() elif self._window: self._window.show_all() if not utils.HAS_STATUS_ICON: self.show_notification_always.hide() if not utils.ENABLE_ARTICLESYNC: 
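# (illustrative note, not in the original source) Every on_* handler
# further down in this dialog follows the same two-step pattern,
# schematically:
#
#   self._app.db.set_setting(ptvDB.BOOL, '/apps/penguintv/auto_resume', val)
#   if not utils.HAS_GCONF:
#       self._app.set_auto_resume(val)
#
# With GConf present the app picks up the change via the key-change
# notification; without it the handler calls the app setter directly, so
# both builds share one code path.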
self.xml.get_widget("sync_contents").hide() self.xml.get_widget("notebook3").set_show_tabs(False) def extract_content(self): vbox = self.xml.get_widget('prefs_vbox') vbox.unparent() vbox.show_all() self._window = None if utils.RUNNING_SUGAR: self.auto_download_limiter_widget.hide() self.auto_download_limit_widget.hide() self.limiter_hbox_widget.hide() self.show_notification_always.hide() self.xml.get_widget("button_close").hide() #if utils.RUNNING_HILDON: # self.show_notification_always.hide() return vbox def hildon_choose_folder(self, widget): new_chooser = hildon.FileChooserDialog(self._app.main_window.window, action=gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER) new_chooser.set_default_response(gtk.RESPONSE_OK) new_chooser.set_current_folder(self._hildon_chooser_button.get_label()) response = new_chooser.run() if response == gtk.RESPONSE_OK: try: logging.debug("look it changed hildon") val = new_chooser.get_filename() self._app.db.set_setting(ptvDB.STRING, '/apps/penguintv/media_storage_location', val) if not utils.HAS_GCONF: logging.debug("telling the app about the new setting") self._app.set_media_storage_location(val) except: pass elif response == gtk.RESPONSE_CANCEL: #print 'Closed, no files selected' pass new_chooser.destroy() def hide(self): if self._window: self._window.hide() def set_article_sync(self, article_sync): self._article_sync = article_sync combo = self.xml.get_widget("sync_protocol_combo") model = combo.get_model() model.clear() for name in self._article_sync.get_plugins().keys(): model.append([name]) def on_window_preferences_delete_event(self, widget, event): if self._window: return self._window.hide_on_delete() def set_feed_refresh_method(self, method): if method == penguintv.REFRESH_AUTO: self.radio_refresh_auto.set_active(True) elif method == penguintv.REFRESH_SPECIFIED: self.radio_refresh_spec.set_active(True) else: self.radio_refresh_never.set_active(True) def set_feed_refresh_frequency(self, freq): self.feed_refresh_widget.set_text(str(freq)) def set_bt_settings(self, bt_settings): self.min_port_widget.set_text(str(bt_settings['min_port'])) self.max_port_widget.set_text(str(bt_settings['max_port'])) self.ul_limit_widget.set_text(str(bt_settings['ul_limit'])) # self.dl_limit_widget.set_text(str(bt_settings['dl_limit'])) def set_auto_resume(self, autoresume): self.autoresume.set_active(autoresume) def set_poll_on_startup(self, poll_on_startup): self.poll_on_startup.set_active(poll_on_startup) def set_show_notification_always(self, always): self.show_notification_always.set_active(always) def set_cache_images(self, cache): self.cache_images_widget.set_active(cache) def set_auto_download(self, auto_download): self.auto_download_widget.set_active(auto_download) def set_auto_download_limiter(self, limiter): self.auto_download_limiter_widget.set_active(limiter) def set_auto_download_limit(self, limit): self.auto_download_limit_widget.set_text(str(limit/1024)) # print "set text to: "+str(limit/1024) def set_media_storage_location(self, location): if utils.RUNNING_HILDON: self._hildon_chooser_button.set_label(location) else: self.xml.get_widget("media_storage_chooser").set_current_folder(location) def set_media_storage_style(self, style): self.xml.get_widget("media_storage_style_cbb").set_active(style) def set_use_article_sync(self, enabled): self.xml.get_widget("sync_enabled_checkbox").set_active(enabled) self.xml.get_widget("sync_settings_frame").set_sensitive(enabled) self.xml.get_widget("sync_status_box").set_sensitive(enabled) def set_article_sync_plugin(self, plugin): 
combo = self.xml.get_widget("sync_protocol_combo") for row in combo.get_model(): if row[0] == plugin: combo.set_active_iter(row.iter) self._add_sync_ui(plugin) return def set_article_sync_readonly(self, readonly): self.xml.get_widget("sync_readonly_check").set_active(readonly) def get_media_storage_location(self): if utils.RUNNING_HILDON: return self._hildon_chooser_button.get_label() else: widget = self.xml.get_widget("media_storage_chooser") return widget.get_filename() def get_use_article_sync(self): return self.xml.get_widget("sync_enabled_checkbox").get_active() def get_article_sync_readonly(self): return self.xml.get_widget("sync_readonly_check").get_active() def set_sync_status(self, status): self.xml.get_widget("sync_status_label").set_text(status) def on_button_close_clicked(self,event): self.hide() #we just update the gconf keys here, and then the app is signalled and it updates itself def select_refresh(self, radiobutton, new_val): try: if new_val == penguintv.REFRESH_AUTO: self._app.db.set_setting(ptvDB.STRING, '/apps/penguintv/feed_refresh_method', 'auto') if not utils.HAS_GCONF: self._app.set_feed_refresh_method('auto') elif new_val == penguintv.REFRESH_SPECIFIED: self._app.db.set_setting(ptvDB.STRING, '/apps/penguintv/feed_refresh_method', 'specified') if not utils.HAS_GCONF: self._app.set_feed_refresh_method('specified') else: self._app.db.set_setting(ptvDB.STRING, '/apps/penguintv/feed_refresh_method', 'never') if not utils.HAS_GCONF: self._app.set_feed_refresh_method('never') except AttributeError: pass #this fails on startup, which is good because we haven't loaded the proper value in the app yet def on_feed_refresh_changed(self,event): try: val = int(self.feed_refresh_widget.get_text()) except ValueError: return self._app.db.set_setting(ptvDB.INT, '/apps/penguintv/feed_refresh_frequency',val) if not utils.HAS_GCONF: self._app.set_polling_frequency(val) def on_auto_resume_toggled(self,event): self._app.db.set_setting(ptvDB.BOOL, '/apps/penguintv/auto_resume',self.autoresume.get_active()) if not utils.HAS_GCONF: self._app.set_auto_resume(self.autoresume.get_active()) def on_show_notification_always(self, event): self._app.db.set_setting(ptvDB.BOOL, '/apps/penguintv/show_notification_always',self.show_notification_always.get_active()) if not utils.HAS_GCONF: self._app.set_show_notification_always(self.show_notification_always.get_active()) def on_poll_on_startup_toggled(self,event): self._app.db.set_setting(ptvDB.BOOL, '/apps/penguintv/poll_on_startup',self.poll_on_startup.get_active()) if not utils.HAS_GCONF: self._app.set_poll_on_startup(self.poll_on_startup.get_active()) def on_cache_images_toggled(self, event): cache_images = self.cache_images_widget.get_active() self._app.db.set_setting(ptvDB.BOOL, '/apps/penguintv/cache_images_locally', cache_images) if not utils.HAS_GCONF: self._app.set_cache_images(cache_images) def on_auto_download_toggled(self, event): auto_download = self.auto_download_widget.get_active() self._app.db.set_setting(ptvDB.BOOL, '/apps/penguintv/auto_download', auto_download) self.limiter_hbox_widget.set_sensitive(auto_download) if not utils.HAS_GCONF: self._app.set_auto_download(auto_download) def on_auto_download_limiter_toggled(self,event): self._app.db.set_setting(ptvDB.BOOL, '/apps/penguintv/auto_download_limiter',self.auto_download_limiter_widget.get_active()) if not utils.HAS_GCONF: self._app.set_auto_download_limiter(self.auto_download_limiter_widget.get_active()) def on_auto_download_limit_focus_out_event(self, thing, event): try: limit = 
int(self.auto_download_limit_widget.get_text())*1024 # print "from prefs, setting gconf to, "+str(limit) except ValueError: return self._app.db.set_setting(ptvDB.INT, '/apps/penguintv/auto_download_limit',limit) if not utils.HAS_GCONF: self._app.set_auto_download_limit(limit) def on_min_port_entry_changed(self,event): try: minport = int(self.min_port_widget.get_text()) except ValueError: return self._app.db.set_setting(ptvDB.INT, '/apps/penguintv/bt_min_port',minport) if not utils.HAS_GCONF: self._app.set_bt_minport(minport) def on_max_port_entry_changed(self,event): try: maxport = int(self.max_port_widget.get_text()) except ValueError: return self._app.db.set_setting(ptvDB.INT, '/apps/penguintv/bt_max_port',maxport) if not utils.HAS_GCONF: self._app.set_bt_maxport(maxport) def on_upload_limit_entry_changed(self,event): try: val = int(self.ul_limit_widget.get_text()) except ValueError: return self._app.db.set_setting(ptvDB.INT, '/apps/penguintv/bt_ul_limit',val) if not utils.HAS_GCONF: self._app.set_bt_ullimit(val) def on_media_storage_chooser_file_set(self, widget): val = widget.get_filename() self._app.db.set_setting(ptvDB.STRING, '/apps/penguintv/media_storage_location', val) if not utils.HAS_GCONF: logging.debug("telling the app about the new setting") self._app.set_media_storage_location(val) def on_media_storage_style_cbb_changed(self, widget): style = widget.get_active() if style < 0: style == 0 self._app.db.set_setting(ptvDB.INT, '/apps/penguintv/media_storage_style', style) if not utils.HAS_GCONF: self._app.set_media_storage_style(style) def on_sync_enabled_checkbox_toggled(self, widget): enabled = widget.get_active() self._app.db.set_setting(ptvDB.BOOL, '/apps/penguintv/use_article_sync', enabled) if not utils.HAS_GCONF: self._app.set_use_article_sync(enabled) self.xml.get_widget("sync_settings_frame").set_sensitive(enabled) self.xml.get_widget("sync_status_box").set_sensitive(enabled) self.xml.get_widget("sync_readonly_check").set_sensitive(enabled) def on_sync_protocol_combo_changed(self, widget): current_plugin = self._article_sync.get_current_plugin() model = widget.get_model() it = widget.get_active_iter() plugin = model[it][0] if plugin == current_plugin: #logging.debug("same plugin") return #logging.debug("COMBO CHANGED") if self._article_sync.is_enabled(): self._app.sync_authenticate(newplugin=plugin) def _do_switch_ui(): if not self._article_sync.is_loaded(): logging.debug("prefs window: plugin not loaded yet") return True self._remove_sync_ui() self._add_sync_ui(plugin) #logging.debug("setting sync plugin to %s" % plugin) self._app.db.set_setting(ptvDB.STRING, '/apps/penguintv/article_sync_plugin', plugin) return False gobject.timeout_add(500, _do_switch_ui) def _remove_sync_ui(self): def infanticide(child): #logging.debug("destroying %s" % str(child)) child.destroy() container = self.xml.get_widget("sync_settings_vbox") container.foreach(infanticide) def _add_sync_ui(self, plugin): container = self.xml.get_widget("sync_settings_vbox") new_ui = self._article_sync.get_parameter_ui(plugin) container.add(new_ui) container.show_all() def on_sync_login_button_clicked(self, widget): self._app.sync_authenticate() def on_sync_readonly_check_toggled(self, widget): enabled = widget.get_active() self._app.db.set_setting(ptvDB.BOOL, '/apps/penguintv/sync_readonly', enabled) if not utils.HAS_GCONF: self._app.set_article_sync_readonly(enabled) PenguinTV-4.2.0/penguintv/Lucene.py0000644000000000000000000003640610646750251014105 0ustar from PyLucene import * import os, os.path import 
utils from pysqlite2 import dbapi2 as sqlite from threading import Lock import HTMLParser from time import sleep """ This class does the searching for PenguinTV. It has full access to its own database object. """ ENTRY_LIMIT=100 class Lucene: def __init__(self): if utils.RUNNING_SUGAR: import sugar.env self.home = os.path.join(sugar.env.get_profile_path(), 'penguintv') else: self.home = os.path.join(os.getenv('HOME'), ".penguintv") try: os.stat(self.home) except: try: os.mkdir(self.home) except: raise DBError, "error creating directories: "+self.home self._storeDir = os.path.join(self.home,"search_store") self.needs_index = False try: os.stat(os.path.join(self._storeDir,"NEEDSREINDEX")) #if that exists, we need to reindex self.needs_index = True except: pass if self.needs_index: try: os.remove(os.path.join(self._storeDir,"NEEDSREINDEX")) except: print "Error removing NEEDSREINDEX... check permisions inside %s" % (self.home) if not os.path.exists(self._storeDir): os.mkdir(self._storeDir) self.needs_index = True self._index_lock = Lock() self._quitting = False def finish(self, needs_index=False): if needs_index: self._interrupt() self._quitting = True def _interrupt(self): f = open(os.path.join(self._storeDir,"NEEDSREINDEX"),"w") f.close() def _get_db(self): try: if os.path.isfile(os.path.join(self.home,"penguintv4.db")) == False: raise DBError,"database file missing" db=sqlite.connect(os.path.join(self.home,"penguintv4.db"), timeout=10 ) db.isolation_level="DEFERRED" return db except: raise DBError, "Error connecting to database in Lucene module" def Do_Index_Threaded(self, callback): PythonThread(target=self.Do_Index, args=(callback,)).start() def Do_Index(self, callback=None): """loop through all feeds and entries and feed them to the beast""" def index_interrupt(): writer.close() self._index_lock.release() if callback is not None: callback() self._interrupt() return if not self._index_lock.acquire(False): print "already indexing, not trying to reindex again" return db = self._get_db() c = db.cursor() analyzer = StandardAnalyzer() store = FSDirectory.getDirectory(self._storeDir, True) writer = IndexWriter(store, analyzer, True) c.execute(u"""SELECT id, title, description FROM feeds""") feeds = c.fetchall() c.execute(u"""SELECT id, feed_id, title, description,fakedate FROM entries""") entries = c.fetchall() c.close() db.close() print "indexing feeds" def feed_index_generator(feeds): for feed_id, title, description in feeds: try: doc = Document() doc.add(Field("feed_id", str(feed_id), Field.Store.YES, Field.Index.UN_TOKENIZED)) doc.add(Field("feed_title", title, Field.Store.YES, Field.Index.TOKENIZED)) doc.add(Field("feed_description", description, Field.Store.NO, Field.Index.TOKENIZED)) writer.addDocument(doc) except Exception, e: print "Failed in indexDocs:", e #sleep(0) #http://twistedmatrix.com/pipermail/twisted-python/2005-July/011052.html yield None for i in feed_index_generator(feeds): if self._quitting: return index_interrupt() print "indexing entries" def entry_index_generator(entries): for entry_id, feed_id, title, description, fakedate in entries: try: doc = Document() p = HTMLDataParser() p.feed(description) description = p.data doc.add(Field("entry_id", str(entry_id), Field.Store.YES, Field.Index.UN_TOKENIZED)) doc.add(Field("entry_feed_id", str(feed_id), Field.Store.YES, Field.Index.UN_TOKENIZED)) time = DateTools.timeToString(long(fakedate)*1000, DateTools.Resolution.HOUR) doc.add(Field("date", time, Field.Store.YES, Field.Index.UN_TOKENIZED)) 
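# (illustrative note, not in the original source) Dates are indexed at hour
# resolution and on PyLucene's millisecond scale (hence the *1000 above);
# Search() reverses the conversion with
# DateTools.stringToTime(doc.get("date")) / 1000.0 to get back to the
# seconds used everywhere else in PenguinTV.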
doc.add(Field("entry_title",title, Field.Store.YES, Field.Index.TOKENIZED)) doc.add(Field("entry_description", description, Field.Store.NO, Field.Index.TOKENIZED)) writer.addDocument(doc) except Exception, e: print "Failed in indexDocs:", e #sleep(.005) yield None for i in entry_index_generator(entries): if self._quitting: return index_interrupt() print "optimizing" writer.optimize() writer.close() print "done indexing" self._index_lock.release() if callback is not None: callback() def Re_Index_Threaded(self,feedlist=[], entrylist=[]): PythonThread(target=self.Re_Index, args=(feedlist,entrylist)).start() def Re_Index(self, feedlist=[], entrylist=[]): if len(feedlist) == 0 and len(entrylist) == 0: return def reindex_interrupt(): indexModifier.close() self._index_lock.release() self._interrupt() return self._index_lock.acquire() db = self._get_db() c = db.cursor() analyzer = StandardAnalyzer() indexModifier = IndexModifier(self._storeDir, analyzer, False) #let it fail #except Exception, e: # print "index modifier error (probably lock)",e,type(e) # return feedlist = utils.uniquer(feedlist) entrylist = utils.uniquer(entrylist) feed_addition = [] entry_addition = [] for feed_id in feedlist: if self._quitting: return reindex_interrupt() try: c.execute(u"""SELECT title, description FROM feeds WHERE id=?""",(feed_id,)) title, description = c.fetchone() feed_addition.append((feed_id, title, description)) except TypeError: pass #it won't be readded. Assumption is we have deleted this feed for entry_id in entrylist: if self._quitting: return reindex_interrupt() try: c.execute(u"""SELECT feed_id, title, description, fakedate FROM entries WHERE id=?""",(entry_id,)) feed_id, title, description, fakedate = c.fetchone() entry_addition.append((entry_id, feed_id, title, description, fakedate)) except TypeError: pass c.close() db.close() entry_addition = utils.uniquer(entry_addition) if self._quitting: return reindex_interrupt() #first delete anything deleted or changed for feed_id in feedlist: try: indexModifier.deleteDocuments(Term("feed_id",str(feed_id))) except Exception, e: print "Failed deleting feed:", e for entry_id in entrylist: try: indexModifier.deleteDocuments(Term("entry_id",str(entry_id))) except Exception, e: print "Failed deleting entry:", e #now add back the changes #print [f[0] for f in feed_addition] for feed_id, title, description in feed_addition: if self._quitting: return reindex_interrupt() try: doc = Document() doc.add(Field("feed_id", str(feed_id), Field.Store.YES, Field.Index.UN_TOKENIZED)) doc.add(Field("feed_title",title, Field.Store.YES, Field.Index.TOKENIZED)) doc.add(Field("feed_description", description, Field.Store.NO, Field.Index.TOKENIZED)) indexModifier.addDocument(doc) except Exception, e: print "Failed adding feed:", e #print [(e[0],e[1]) for e in entry_addition] for entry_id, feed_id, title, description, fakedate in entry_addition: if self._quitting: return reindex_interrupt() try: doc = Document() p = HTMLDataParser() p.feed(description) description = p.data doc.add(Field("entry_id", str(entry_id), Field.Store.YES, Field.Index.UN_TOKENIZED)) doc.add(Field("entry_feed_id", str(feed_id), Field.Store.YES, Field.Index.UN_TOKENIZED)) time = DateTools.timeToString(long(fakedate)*1000, DateTools.Resolution.HOUR) doc.add(Field("date", time, Field.Store.YES, Field.Index.UN_TOKENIZED)) doc.add(Field("entry_title",title, Field.Store.YES, Field.Index.TOKENIZED)) doc.add(Field("entry_description", description, Field.Store.NO, Field.Index.TOKENIZED)) indexModifier.addDocument(doc) 
except Exception, e: print "Failed adding entry:", e indexModifier.flush() indexModifier.close() self._index_lock.release() def Search(self, command, blacklist=[], include=['feeds','entries'], since=0): """returns two lists, one of search results in feeds, and one for results in entries. It is sorted so that title results are first, description results are second""" if not self._index_lock.acquire(False): #if we are indexing, don't try to search #print "wouldn't get lock" return ([],[]) self._index_lock.release() analyzer = StandardAnalyzer() directory = FSDirectory.getDirectory(self._storeDir, False) searcher = IndexSearcher(directory) sort = Sort("date", True) #sort by fake date, reversed feed_results=[] entry_results=[] #MultiFindQuery has a bug in 2.0.0... for now don't use #queryparser = MultiFieldQueryParser(['title','description'], self.analyzer) #query = MultiFiendQueryParser.parse(command, ['title','description'], self.analyzer) def build_results(hits): """we use this four times, so save some typing""" for i, doc in hits: feed_id = doc.get("feed_id") if feed_id is None: feed_id = doc.get("entry_feed_id") feed_id = int(feed_id) try: if feed_id not in blacklist: entry_id = doc.get("entry_id") if entry_id is None: #meaning this is actually a feed (we could know that from above, but eh) feed_results.append(int(feed_id)) else: # meaning "entry" if len(entry_results) < ENTRY_LIMIT: title = doc.get("entry_title") fakedate = DateTools.stringToTime(doc.get("date")) / 1000.0 if fakedate > since: entry_results.append((int(entry_id),title, fakedate, feed_id)) #else: # print "excluding:"+doc.get("title") except Exception, e: print e print feed_id print blacklist #query FEED TITLES if 'feeds' in include: queryparser = QueryParser("feed_title", analyzer) query = QueryParser.parse(queryparser, command) hits = searcher.search(query) build_results(hits) #query FEED DESCRIPTIONS queryparser = QueryParser("feed_description", analyzer) query = QueryParser.parse(queryparser, command) hits = searcher.search(query) build_results(hits) if 'entries' in include: #ENTRY TITLES queryparser = QueryParser("entry_title", analyzer) query = QueryParser.parse(queryparser, command) hits = searcher.search(query, sort) build_results(hits) #ENTRY DESCRIPTIONS queryparser = QueryParser("entry_description", analyzer) query = QueryParser.parse(queryparser, command) hits = searcher.search(query, sort) build_results(hits) for entry in entry_results: feed_results.append(entry[3]) feed_results = utils.uniquer(feed_results) entry_results = utils.uniquer(entry_results) #need to resort because we merged two lists together entry_results.sort(lambda x,y: int(y[2] - x[2])) searcher.close() #for e in entry_results: # print e[2],e[1] return (feed_results, entry_results) def get_popular_terms(self, max_terms=100, junkWords=[], fields=[]): #ported from http://www.getopt.org/luke/ HighFreqTerms.java self._index_lock.acquire() def insert(l, val): #for item in l: # #try: # print val[0] # print l.index(val # l[l.index(val[0])]=(val[0], l[l.index(val[0])][1]+val[1]) # print "updating",l[l.index(val[0])] #except: # pass insert_at = -1 i=-1 for item in l: i+=1 if item[0] == val[0]: l[i] = (item[0], item[1]+val[1]) return if val[1]>item[1] and insert_at==-1: insert_at = i if insert_at >= 0: l.insert(insert_at, val) else: l.append(val) reader = IndexReader.open(self._storeDir) terms = reader.terms() pop_terms = {} #minFreq = 0 seen=[] while terms.next(): term = terms.term() field = term.field() if len(fields)>0: if field not in fields: continue 
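# --- Editor's note: Search() above runs a separate QueryParser per field
# (feed_title, feed_description, entry_title, entry_description), merges the
# hits, dedupes with utils.uniquer(), and re-sorts newest-first because two
# independently sorted lists were concatenated.  A hedged miniature of that
# merge/dedupe/sort step, not the code PenguinTV actually runs:
def merge_entry_hits(*hit_lists):
    merged = []
    for hits in hit_lists:
        merged.extend(hits)      # hits are (entry_id, title, fakedate, feed_id)
    seen, unique = set(), []
    for hit in merged:           # stand-in for utils.uniquer()
        if hit not in seen:
            seen.add(hit)
            unique.append(hit)
    unique.sort(key=lambda h: h[2], reverse=True)   # newest fakedate first
    return unique
# --- end editor's note ---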
if term.text() in junkWords: continue try: i = float(term.text()) continue except: pass text = term.text() #if text in seen: # print "exists" # i = seen.index(text) # pop_terms[i][1] = pop_terms[i][1] + terms.docFreq() #else: if not pop_terms.has_key(field): pop_terms[field]=[] pop_terms[field].append((text, terms.docFreq())) # seen.append(term.text()) #if terms.docFreq() > minFreq: # insert(pop_terms, (term.text(), terms.docFreq())) # if max_terms>0 and len(pop_terms) >= max_terms: # pop_terms.pop(-1) # minFreq = pop_terms[-1][1] def merge(l1, l2): if len(l1)>len(l2): l3 = l1 l2 = l1 l1 = l3 del l3 i=-1 for term,freq in l1: i+=1 while term < l2[i][0] and i= len(l2): l2.append((term,freq)) break if term == l2[i]: l2[i] = (l2[i][0], l2[i][1] + freq) if term > l2[i]: l2.insert(i,(term,freq)) field_rank = [] for key in pop_terms.keys(): field_rank.append((len(pop_terms[key]), key)) field_rank.sort() field_rank.reverse() for rank,key in field_rank[1:]: pop_terms[field_rank[0][1]] = self.merge(pop_terms[field_rank[0][1]],pop_terms[key]) #j=-1 #for term in pop_terms[field_rank[0][1]]: # j+=1 # if term in pop_terms[key]: # pop_terms[field_rank[0][1]][j] = (term, pop_terms[field_rank[0][1]][j][1] + pop_terms[key][pop_terms[key].index(term)][1]) pop_terms=pop_terms[field_rank[0][1]] #pop_terms.sort(lambda x,y: y[1]-x[1]) if max_terms>0: pop_terms = pop_terms[:max_terms] self._index_lock.release() return pop_terms def merge(self, l1, l2): """merges two sorted lists""" if len(l1)>len(l2): l3 = l1 l1 = l2 l2 = l3 del l3 i=-1 for term,freq in l1: i+=1 while term > l2[i][0]: i+=1 if i>=len(l2):break if i >= len(l2): l2.append((term,freq)) break if term == l2[i][0]: l2[i] = (l2[i][0], l2[i][1] + freq) if term < l2[i][0]: l2.insert(i,(term,freq)) return l2 class DBError(Exception): def __init__(self,error): self.error = error def __str__(self): return self.error class HTMLDataParser(HTMLParser.HTMLParser): def __init__(self): HTMLParser.HTMLParser.__init__(self) self.data = "" def handle_data(self, data): self.data+=data PenguinTV-4.2.0/penguintv/AddFeedUtils.py0000644000000000000000000001737711141102520015152 0ustar #imports copied from addfeeddialog import gtk import socket import gettext import os.path import traceback import sys import logging import HTMLParser import utils from ptvDB import FF_NOAUTODOWNLOAD, FF_NOSEARCH, FF_NOAUTOEXPIRE, \ FF_NOTIFYUPDATES, FF_ADDNEWLINES, FF_MARKASREAD import LoginDialog if utils.HAS_PYXML: import itunes _=gettext.gettext def correct_url(url, glade_prefix=None): """figures out if the url is a feed, or if it's actually a web page with a feed in it. Also does http auth. 
returns the correct url and a title""" import feedparser import HTMLParser import urlparse import urllib class my_url_opener(urllib.FancyURLopener): """Little class to pop up a login window""" NONE = 0 FAILED = 1 CANCELLED = 2 def __init__(self, widget): urllib.FancyURLopener.__init__(self) self.widget = widget self.username = None self.password = None self.tries = 0 self.failed_auth = 0 def prompt_user_passwd(self, host, realm): assert self.widget is not None if self.tries==3: self.failed_auth = my_url_opener.FAILED return (None,None) d = LoginDialog.LoginDialog(self.widget) response = d.run() d.hide() if response != gtk.RESPONSE_OK: self.failed_auth = my_url_opener.CANCELLED return (None,None) self.username = d.username self.password = d.password self.tries+=1 return (d.username, d.password) #account for various http aliases protocol = url.split(':')[0] if protocol in ('feed','itpc','pcast'): url = 'http' + url[url.find(':'):] if utils.HAS_PYXML: if itunes.is_itunes_url(url): try: url = itunes.get_rss_from_itunes(url) except: raise BadFeedURL,"Error trying to get itunes podcast" if glade_prefix is not None: #TODO: abstract this out so we can have a command-line testing version as well as gtk urllib._urlopener = my_url_opener(gtk.glade.XML(os.path.join(glade_prefix, 'dialogs.glade'), "dialog_login", 'penguintv')) else: urllib._urlopener = my_url_opener(None) url_stream = None try: #logging.debug("opening url: %s" % url) url_stream = urllib.urlopen(url) #logging.debug("done") except socket.timeout: raise BadFeedURL,"The website took too long to respond, and the connection timed out." except IOError, e: if "No such file or directory" in e: return correct_url("http://"+url, glade_prefix) raise BadFeedURL,"There was an error loading the url." except Exception, e: raise BadFeedURL,"There was an error loading the url." 
title = url if urllib._urlopener.failed_auth == my_url_opener.FAILED: raise AuthorizationFailed if urllib._urlopener.failed_auth == my_url_opener.CANCELLED: raise AuthorizationCancelled if urllib._urlopener.username is not None: #build an auth-compatible url #scheme://netloc/path;parameters?query#fragment #http://www.cwi.nl:80/%7Eguido/Python.html #('http', 'www.cwi.nl:80', '/%7Eguido/Python.html', '', '', '') u_t = urlparse.urlparse(url) url = u_t[0]+"://"+str(urllib._urlopener.username)+":"+str(urllib._urlopener.password)+"@"+u_t[1]+u_t[2] title = u_t[0]+"://"+str(urllib._urlopener.username)+":"+("*"*len(urllib._urlopener.password))+"@"+u_t[1]+u_t[2] if len(u_t[3])>0: url=url+";"+u_t[3] title=title+";"+u_t[3] if len(u_t[4])>0: url=url+"?"+u_t[4] title=title+";"+u_t[4] if len(u_t[5])>0: url=url+"#"+u_t[5] title=title+";"+u_t[5] url_stream = urllib.urlopen(url) mimetype = url_stream.info()['Content-Type'].split(';')[0].strip() handled_mimetypes = ['application/atom+xml','application/rss+xml','application/rdf+xml','application/xml','text/xml', 'text/plain'] if mimetype in handled_mimetypes: pass elif mimetype in ['text/html', 'application/xhtml+xml']: p = AltParser() try: for line in url_stream.readlines(): p.feed(line) if p.head_end: #if we've gotten an error, we need the whole page break #otherwise the header is enough available_versions = p.alt_tags if len(available_versions)==0: #this might actually be a feed data = feedparser.parse(url) if len(data['channel']) == 0 or len(data['items']) == 0: #nope raise BadFeedURL, "warning: no alt mimetypes: %s" % str(p.alt_tags) else: pass #we're good else: newurl="" url_choices = [] for mimetype, pos_url, t in available_versions: if mimetype in handled_mimetypes: #first clean it up if pos_url[:4]!="http": #maybe the url is not fully qualified (fix for metaphilm.com) if pos_url[0:2] == '//': #fix for gnomefiles.org pos_url = "http:"+pos_url elif pos_url[0] == '/': #fix for lwn.net. Maybe we should do more proper base detection? 
parsed = urlparse.urlsplit(url) pos_url=parsed[0]+"://"+parsed[1]+pos_url else: pos_url=os.path.split(url)[0]+'/'+pos_url #now test sizes url_choices.append((pos_url, t)) if len(url_choices) > 1: newurl, title = _choose_url(url_choices) if newurl is None: raise BadFeedURL, "User canceled operation" elif len(url_choices) == 1: newurl, title = url_choices[0] if newurl == "": raise BadFeedURL, "warning: unhandled alt mimetypes: %s" % str(p.alt_tags) url = newurl except HTMLParser.HTMLParseError: exc_type, exc_value, exc_traceback = sys.exc_info() error_msg = "" for s in traceback.format_exception(exc_type, exc_value, exc_traceback): error_msg += s #sometimes this is actually the feed (pogue's posts @ nytimes.com) try: p = feedparser.parse(url) except Exception, e: raise BadFeedURL, "feedparser error: %s" % str(e) if len(p['channel']) == 0 or len(p['items']) == 0: #ok there really is a problem here raise BadFeedURL, "htmlparser error: %s" % error_msg else: raise BadFeedURL, "warning: unhandled page mimetypes: %s<--" % str(mimetype) return (url,title) def _choose_url(url_list): dialog = gtk.Dialog(title=_("Choose Feed"), parent=None, flags=gtk.DIALOG_MODAL, buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT, gtk.STOCK_OK, gtk.RESPONSE_ACCEPT)) label = gtk.Label(_("Please choose one of the feeds in this page")) dialog.vbox.pack_start(label, True, True, 0) list_widget = gtk.TreeView() model = gtk.ListStore(str, str) r = gtk.CellRendererText() c = gtk.TreeViewColumn('Feeds') c.pack_start(r) c.set_attributes(r, markup=1) list_widget.append_column(c) list_widget.set_model(model) dialog.vbox.pack_start(list_widget) for url, title in url_list: model.append((url, title)) dialog.show_all() response = dialog.run() dialog.hide() del dialog if response == gtk.RESPONSE_ACCEPT: selection = list_widget.get_selection() s_iter = selection.get_selected()[1] if s_iter is None: return (None, None) return list(model[s_iter]) return (None, None) class AltParser(HTMLParser.HTMLParser): def __init__(self): HTMLParser.HTMLParser.__init__(self) self.alt_tags=[] self.head_end=False def handle_starttag(self, tag, attrs): """Signal when we get to a tag.""" if tag=='link': attr_dic = {} for attr in attrs: attr_dic[attr[0]] = attr[1] try: if attr_dic['rel'] == 'alternate': if attr_dic['type'] in ['application/atom+xml','application/rss+xml','text/xml']: attr_dic.setdefault('title',attr_dic['href']) self.alt_tags.append((attr_dic['type'], attr_dic['href'], attr_dic['title'])) except: pass def handle_endtag(self, tag): if tag == 'head': self.head_end=True class AuthorizationFailed(Exception): def __init__(self): pass def __str__(self): return "Bad username or password" class AuthorizationCancelled(Exception): def __init__(self): pass def __str__(self): return "Authorization cancelled" class BadFeedURL(Exception): def __init__(self, message="couldn't get a feed from this url"): self.message = message def __str__(self): return self.message PenguinTV-4.2.0/penguintv/HildonListener.py0000644000000000000000000000267410752666335015624 0ustar # # Contains listeners for various hildon-specific signals like hardware buttons, # hardware status, etc # #hardware buttons #hardware state #system exit #save-on-minimize stuff? 
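# --- Editor's note: AddFeedUtils (above) signals its failure modes with small
# exception classes -- BadFeedURL, AuthorizationFailed, AuthorizationCancelled
# -- so callers can branch on *why* a feed could not be added.  A hypothetical
# caller shape (try_add is illustrative, not part of PenguinTV):
def try_add(url):
    try:
        url, title = correct_url(url)    # from AddFeedUtils above
    except AuthorizationCancelled:
        return None                      # user backed out; stay quiet
    except AuthorizationFailed:
        print "bad username or password"
    except BadFeedURL, e:
        print "could not add feed:", str(e)
    else:
        return (url, title)
# --- end editor's note ---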
#network status import logging import gtk import osso import conic class HildonListener: def __init__(self, app, h_window, h_context): self._app = app self._h_window = h_window self._h_context = h_context state = osso.DeviceState(self._h_context) state.set_device_state_callback(self._device_state_cb) con = conic.Connection() con.connect('connection-event', self._connection_cb) if not con.request_connection(conic.CONNECT_FLAG_NONE): logging.debug("error with conic connection thingy") def _device_state_cb(self, shutdown, save_unsaved_data, memory_low, system_inactivity, message, loop): print "Shutdown: ", shutdown print "Save unsaved data: ", save_unsaved_data print "Memory low: ", memory_low print "System Inactivity: ", system_inactivity print "Message: ", message def _connection_cb(self, con, event): status = event.get_status() if status == conic.CONNECTION_CONNECTED: logging.debug("CONIC CONNECTED") self._app.maybe_change_online_status(True) elif status == conic.CONNECTION_DISCONNECTING: logging.debug("CONIC DISCONNECTING") self._app.maybe_change_online_status(False) elif status == conic.CONNECTION_DISCONNECTED: logging.debug("CONIC DISCONNECTED") self._app.maybe_change_online_status(False) PenguinTV-4.2.0/penguintv/IconManager.py0000644000000000000000000000747610765650112015057 0ustar import os, os.path import glob import urllib import logging ICONSIZE = 48, 48 try: import Image HAS_PIL = True except: HAS_PIL = False class IconManager: """A small class that handles favicons for feeds""" def __init__(self, home): self._home = home try: os.stat(os.path.join(self._home, 'icons')) except: os.mkdir(os.path.join(self._home, 'icons')) def icon_exists(self, feed_id): filename = os.path.join(self._home, 'icons', str(feed_id) + '.*') result = glob.glob(filename) result = [r for r in result if r[-4:].upper() != "NONE"] return result > 0 def get_icon(self, feed_id): filename = os.path.join(self._home, 'icons', str(feed_id) + '.*') result = glob.glob(filename) if len(result) == 0: return None return result[0] def get_icon_pixbuf(self, feed_id, max_width=None, max_height=None, min_width=None, min_height=None): import gtk if min_width is None: min_width = 8 if min_height is None: min_height = 8 filename = self.get_icon(feed_id) if filename is None: p = gtk.gdk.Pixbuf(gtk.gdk.COLORSPACE_RGB,True,8, min_width, min_height) p.fill(0xffffff00) return p try: p = gtk.gdk.pixbuf_new_from_file(filename) except: p = gtk.gdk.Pixbuf(gtk.gdk.COLORSPACE_RGB,True,8, min_width, min_height) p.fill(0xffffff00) return p height = p.get_height() width = p.get_width() if max_height is not None: if height > max_height: height = max_height width = p.get_width() * height / p.get_height() if max_width is not None: if width > max_width: width = max_width height = p.get_height() * width / p.get_width() if min_height is not None: if height < min_height: height = min_height width = p.get_width() * height / p.get_height() if min_width is not None: if width < min_width: width = min_width height = p.get_height() * width / p.get_width() if height != p.get_height() or width != p.get_width(): del p p = gtk.gdk.pixbuf_new_from_file_at_size(filename, width, height) return p def download_icon(self, feed_id, feedparser_data): url_list = [] try: url_list.append(feedparser_data['feed']['image']['href']) except: pass try: url_list.append(feedparser_data['feed']['link'] + '/favicon.ico') except: pass found=False for url in url_list: try: filename = os.path.join(self._home, 'icons', str(feed_id) + '.' 
+ url.split('.')[-1]) urllib.urlretrieve(url, filename) found = True break except: pass if found: if HAS_PIL: try: im = Image.open(filename) if im.size[0] > ICONSIZE[0] or im.size[1] > ICONSIZE[1]: im.thumbnail(ICONSIZE, Image.ANTIALIAS) im.save(filename+".thumb", "PNG") os.remove(filename) newname = ".".join(filename.split('.')[:-1]) + ".png" os.rename(filename+".thumb", newname) except Exception, e: logging.warning("Feed %i: Couldn't resize feed icon: %s" % (feed_id, str(e))) else: logging.warning("Don't have Python Imaging, can't resize icons") return url f = open(os.path.join(self._home, 'icons', str(feed_id)+'.none'), 'w') f.write("") f.close() return None def remove_icon(self, feed_id): filename = os.path.join(self._home, 'icons', str(feed_id) + '.*') result = glob.glob(filename) for r in result: print "deleting icon:",r os.remove(r) def is_icon_up_to_date(self, feed_id, old_href, feedparser_data): url_list = [] try: url_list.append(feedparser_data['feed']['image']['href']) except: pass try: url_list.append(feedparser_data['feed']['link'] + '/favicon.ico') except: pass if len(url_list) > 0: if old_href in url_list: filename = os.path.join(self._home, 'icons', str(feed_id) + '.*') result = glob.glob(filename) if len(result) == 0: #whoops, there's no file there anymore return False return True return False PenguinTV-4.2.0/penguintv/amazon/0000755000000000000000000000000011450514774013576 5ustar PenguinTV-4.2.0/penguintv/amazon/S3.py0000644000000000000000000005136310760334651014442 0ustar #!/usr/bin/env python # This software code is made available "AS IS" without warranties of any # kind. You may copy, display, modify and redistribute the software # code either by itself or as incorporated into your code; provided that # you do not remove any proprietary notices. Your use of this software # code is at your own risk and you waive any claim against Amazon # Digital Services, Inc. or its affiliates with respect to your use of # this software code. (c) 2006-2007 Amazon Digital Services, Inc. or its # affiliates. import base64 import hmac import httplib import re import sha import sys import time import urllib import urlparse import xml.sax DEFAULT_HOST = 's3.amazonaws.com' PORTS_BY_SECURITY = { True: 443, False: 80 } METADATA_PREFIX = 'x-amz-meta-' AMAZON_HEADER_PREFIX = 'x-amz-' # generates the aws canonical string for the given parameters def canonical_string(method, bucket="", key="", query_args={}, headers={}, expires=None): interesting_headers = {} for header_key in headers: lk = header_key.lower() if lk in ['content-md5', 'content-type', 'date'] or lk.startswith(AMAZON_HEADER_PREFIX): interesting_headers[lk] = headers[header_key].strip() # these keys get empty strings if they don't exist if not interesting_headers.has_key('content-type'): interesting_headers['content-type'] = '' if not interesting_headers.has_key('content-md5'): interesting_headers['content-md5'] = '' # just in case someone used this. it's not necessary in this lib. 
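# --- Editor's note: IconManager.download_icon (above) shrinks oversized
# favicons with PIL's thumbnail(), which resizes in place and preserves the
# aspect ratio.  The resize step on its own, as a sketch assuming classic PIL
# is importable as Image (newer installs use "from PIL import Image"):
import Image
def shrink_icon(path, max_size=(48, 48)):
    im = Image.open(path)
    if im.size[0] > max_size[0] or im.size[1] > max_size[1]:
        im.thumbnail(max_size, Image.ANTIALIAS)   # in-place, keeps aspect
        im.save(path + '.thumb', 'PNG')
        return path + '.thumb'
    return path
# --- end editor's note ---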
if interesting_headers.has_key('x-amz-date'): interesting_headers['date'] = '' # if you're using expires for query string auth, then it trumps date # (and x-amz-date) if expires: interesting_headers['date'] = str(expires) sorted_header_keys = interesting_headers.keys() sorted_header_keys.sort() buf = "%s\n" % method for header_key in sorted_header_keys: if header_key.startswith(AMAZON_HEADER_PREFIX): buf += "%s:%s\n" % (header_key, interesting_headers[header_key]) else: buf += "%s\n" % interesting_headers[header_key] # append the bucket if it exists if bucket != "": buf += "/%s" % bucket # add the key. even if it doesn't exist, add the slash buf += "/%s" % urllib.quote_plus(key) # handle special query string arguments if query_args.has_key("acl"): buf += "?acl" elif query_args.has_key("torrent"): buf += "?torrent" elif query_args.has_key("logging"): buf += "?logging" elif query_args.has_key("location"): buf += "?location" return buf # computes the base64'ed hmac-sha hash of the canonical string and the secret # access key, optionally urlencoding the result def encode(aws_secret_access_key, str, urlencode=False): b64_hmac = base64.encodestring(hmac.new(aws_secret_access_key, str, sha).digest()).strip() if urlencode: return urllib.quote_plus(b64_hmac) else: return b64_hmac def merge_meta(headers, metadata): final_headers = headers.copy() for k in metadata.keys(): final_headers[METADATA_PREFIX + k] = metadata[k] return final_headers # builds the query arg string def query_args_hash_to_string(query_args): query_string = "" pairs = [] for k, v in query_args.items(): piece = k if v != None: piece += "=%s" % urllib.quote_plus(str(v)) pairs.append(piece) return '&'.join(pairs) class CallingFormat: PATH = 1 SUBDOMAIN = 2 VANITY = 3 def build_url_base(protocol, server, port, bucket, calling_format): url_base = '%s://' % protocol if bucket == '': url_base += server elif calling_format == CallingFormat.SUBDOMAIN: url_base += "%s.%s" % (bucket, server) elif calling_format == CallingFormat.VANITY: url_base += bucket else: url_base += server url_base += ":%s" % port if (bucket != '') and (calling_format == CallingFormat.PATH): url_base += "/%s" % bucket return url_base build_url_base = staticmethod(build_url_base) class Location: DEFAULT = None EU = 'EU' class AWSAuthConnection: def __init__(self, aws_access_key_id, aws_secret_access_key, is_secure=True, server=DEFAULT_HOST, port=None, calling_format=CallingFormat.SUBDOMAIN): if not port: port = PORTS_BY_SECURITY[is_secure] self.aws_access_key_id = aws_access_key_id self.aws_secret_access_key = aws_secret_access_key self.is_secure = is_secure self.server = server self.port = port self.calling_format = calling_format def create_bucket(self, bucket, headers={}): return Response(self._make_request('PUT', bucket, '', {}, headers)) def create_located_bucket(self, bucket, location=Location.DEFAULT, headers={}): if location == Location.DEFAULT: body = "" else: body = "" + \ location + \ "" return Response(self._make_request('PUT', bucket, '', {}, headers, body)) def check_bucket_exists(self, bucket): return self._make_request('HEAD', bucket, '', {}, {}) def list_bucket(self, bucket, options={}, headers={}): return ListBucketResponse(self._make_request('GET', bucket, '', options, headers)) def delete_bucket(self, bucket, headers={}): return Response(self._make_request('DELETE', bucket, '', {}, headers)) def put(self, bucket, key, object, headers={}): if not isinstance(object, S3Object): object = S3Object(object) return Response( self._make_request( 'PUT', 
bucket, key, {}, headers, object.data, object.metadata)) def get(self, bucket, key, headers={}): return GetResponse( self._make_request('GET', bucket, key, {}, headers)) def delete(self, bucket, key, headers={}): return Response( self._make_request('DELETE', bucket, key, {}, headers)) def get_bucket_logging(self, bucket, headers={}): return GetResponse(self._make_request('GET', bucket, '', { 'logging': None }, headers)) def put_bucket_logging(self, bucket, logging_xml_doc, headers={}): return Response(self._make_request('PUT', bucket, '', { 'logging': None }, headers, logging_xml_doc)) def get_bucket_acl(self, bucket, headers={}): return self.get_acl(bucket, '', headers) def get_acl(self, bucket, key, headers={}): return GetResponse( self._make_request('GET', bucket, key, { 'acl': None }, headers)) def put_bucket_acl(self, bucket, acl_xml_document, headers={}): return self.put_acl(bucket, '', acl_xml_document, headers) def put_acl(self, bucket, key, acl_xml_document, headers={}): return Response( self._make_request( 'PUT', bucket, key, { 'acl': None }, headers, acl_xml_document)) def list_all_my_buckets(self, headers={}): return ListAllMyBucketsResponse(self._make_request('GET', '', '', {}, headers)) def get_bucket_location(self, bucket): return LocationResponse(self._make_request('GET', bucket, '', {'location' : None})) # end public methods def _make_request(self, method, bucket='', key='', query_args={}, headers={}, data='', metadata={}): server = '' if bucket == '': server = self.server elif self.calling_format == CallingFormat.SUBDOMAIN: server = "%s.%s" % (bucket, self.server) elif self.calling_format == CallingFormat.VANITY: server = bucket else: server = self.server path = '' if (bucket != '') and (self.calling_format == CallingFormat.PATH): path += "/%s" % bucket # add the slash after the bucket regardless # the key will be appended if it is non-empty path += "/%s" % urllib.quote_plus(key) # build the path_argument string # add the ? in all cases since # signature and credentials follow path args if len(query_args): path += "?" + query_args_hash_to_string(query_args) is_secure = self.is_secure host = "%s:%d" % (server, self.port) while True: if (is_secure): connection = httplib.HTTPSConnection(host) else: connection = httplib.HTTPConnection(host) final_headers = merge_meta(headers, metadata); # add auth header self._add_aws_auth_header(final_headers, method, bucket, key, query_args) connection.request(method, path, data, final_headers) resp = connection.getresponse() if resp.status < 300 or resp.status >= 400: return resp # handle redirect location = resp.getheader('location') if not location: return resp # (close connection) resp.read() scheme, host, path, params, query, fragment \ = urlparse.urlparse(location) if scheme == "http": is_secure = True elif scheme == "https": is_secure = False else: raise invalidURL("Not http/https: " + location) if query: path += "?" 
+ query # retry with redirect def _add_aws_auth_header(self, headers, method, bucket, key, query_args): if not headers.has_key('Date'): headers['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime()) c_string = canonical_string(method, bucket, key, query_args, headers) headers['Authorization'] = \ "AWS %s:%s" % (self.aws_access_key_id, encode(self.aws_secret_access_key, c_string)) class QueryStringAuthGenerator: # by default, expire in 1 minute DEFAULT_EXPIRES_IN = 60 def __init__(self, aws_access_key_id, aws_secret_access_key, is_secure=True, server=DEFAULT_HOST, port=None, calling_format=CallingFormat.SUBDOMAIN): if not port: port = PORTS_BY_SECURITY[is_secure] self.aws_access_key_id = aws_access_key_id self.aws_secret_access_key = aws_secret_access_key if (is_secure): self.protocol = 'https' else: self.protocol = 'http' self.is_secure = is_secure self.server = server self.port = port self.calling_format = calling_format self.__expires_in = QueryStringAuthGenerator.DEFAULT_EXPIRES_IN self.__expires = None # for backwards compatibility with older versions self.server_name = "%s:%s" % (self.server, self.port) def set_expires_in(self, expires_in): self.__expires_in = expires_in self.__expires = None def set_expires(self, expires): self.__expires = expires self.__expires_in = None def create_bucket(self, bucket, headers={}): return self.generate_url('PUT', bucket, '', {}, headers) def list_bucket(self, bucket, options={}, headers={}): return self.generate_url('GET', bucket, '', options, headers) def delete_bucket(self, bucket, headers={}): return self.generate_url('DELETE', bucket, '', {}, headers) def put(self, bucket, key, object, headers={}): if not isinstance(object, S3Object): object = S3Object(object) return self.generate_url( 'PUT', bucket, key, {}, merge_meta(headers, object.metadata)) def get(self, bucket, key, headers={}): return self.generate_url('GET', bucket, key, {}, headers) def delete(self, bucket, key, headers={}): return self.generate_url('DELETE', bucket, key, {}, headers) def get_bucket_logging(self, bucket, headers={}): return self.generate_url('GET', bucket, '', { 'logging': None }, headers) def put_bucket_logging(self, bucket, logging_xml_doc, headers={}): return self.generate_url('PUT', bucket, '', { 'logging': None }, headers) def get_bucket_acl(self, bucket, headers={}): return self.get_acl(bucket, '', headers) def get_acl(self, bucket, key='', headers={}): return self.generate_url('GET', bucket, key, { 'acl': None }, headers) def put_bucket_acl(self, bucket, acl_xml_document, headers={}): return self.put_acl(bucket, '', acl_xml_document, headers) # don't really care what the doc is here. 
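# --- Editor's note: the signing scheme used throughout this file is S3's
# classic v2 signature -- HMAC-SHA1 over the canonical string, base64-encoded,
# sent as "Authorization: AWS <key-id>:<signature>".  The core of encode()
# and _add_aws_auth_header() above, isolated as a sketch:
import base64, hmac, sha
def sign(secret_key, canonical_str):
    digest = hmac.new(secret_key, canonical_str, sha).digest()
    return base64.encodestring(digest).strip()
# headers['Authorization'] = "AWS %s:%s" % (key_id, sign(secret, c_string))
# --- end editor's note ---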
def put_acl(self, bucket, key, acl_xml_document, headers={}): return self.generate_url('PUT', bucket, key, { 'acl': None }, headers) def list_all_my_buckets(self, headers={}): return self.generate_url('GET', '', '', {}, headers) def make_bare_url(self, bucket, key=''): full_url = self.generate_url(self, bucket, key) return full_url[:full_url.index('?')] def generate_url(self, method, bucket='', key='', query_args={}, headers={}): expires = 0 if self.__expires_in != None: expires = int(time.time() + self.__expires_in) elif self.__expires != None: expires = int(self.__expires) else: raise "Invalid expires state" canonical_str = canonical_string(method, bucket, key, query_args, headers, expires) encoded_canonical = encode(self.aws_secret_access_key, canonical_str) url = CallingFormat.build_url_base(self.protocol, self.server, self.port, bucket, self.calling_format) url += "/%s" % urllib.quote_plus(key) query_args['Signature'] = encoded_canonical query_args['Expires'] = expires query_args['AWSAccessKeyId'] = self.aws_access_key_id url += "?%s" % query_args_hash_to_string(query_args) return url class S3Object: def __init__(self, data, metadata={}): self.data = data self.metadata = metadata class Owner: def __init__(self, id='', display_name=''): self.id = id self.display_name = display_name class ListEntry: def __init__(self, key='', last_modified=None, etag='', size=0, storage_class='', owner=None): self.key = key self.last_modified = last_modified self.etag = etag self.size = size self.storage_class = storage_class self.owner = owner class CommonPrefixEntry: def __init(self, prefix=''): self.prefix = prefix class Bucket: def __init__(self, name='', creation_date=''): self.name = name self.creation_date = creation_date class Response: def __init__(self, http_response): self.http_response = http_response # you have to do this read, even if you don't expect a body. # otherwise, the next request fails. 
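# --- Editor's note: QueryStringAuthGenerator.generate_url() above builds
# "pre-signed" links: the same canonical string is signed with an absolute
# Expires timestamp, and the credentials travel in the query string instead
# of a header.  A hedged sketch of the resulting URL shape, reusing the
# module-level canonical_string() and encode() helpers; the subdomain-style
# host is an assumption matching CallingFormat.SUBDOMAIN:
import time, urllib
def presign(key_id, secret, method, bucket, key, expires_in=60):
    expires = int(time.time() + expires_in)
    c_str = canonical_string(method, bucket, key, {}, {}, expires)
    sig = encode(secret, c_str, urlencode=True)
    return "https://%s.s3.amazonaws.com/%s?Signature=%s&Expires=%d&AWSAccessKeyId=%s" \
        % (bucket, urllib.quote_plus(key), sig, expires, key_id)
# --- end editor's note ---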
self.body = http_response.read() if http_response.status >= 300 and self.body: self.message = self.body else: self.message = "%03d %s" % (http_response.status, http_response.reason) class ListBucketResponse(Response): def __init__(self, http_response): Response.__init__(self, http_response) if http_response.status < 300: handler = ListBucketHandler() xml.sax.parseString(self.body, handler) self.entries = handler.entries self.common_prefixes = handler.common_prefixes self.name = handler.name self.marker = handler.marker self.prefix = handler.prefix self.is_truncated = handler.is_truncated self.delimiter = handler.delimiter self.max_keys = handler.max_keys self.next_marker = handler.next_marker else: self.entries = [] class ListAllMyBucketsResponse(Response): def __init__(self, http_response): Response.__init__(self, http_response) if http_response.status < 300: handler = ListAllMyBucketsHandler() xml.sax.parseString(self.body, handler) self.entries = handler.entries else: self.entries = [] class GetResponse(Response): def __init__(self, http_response): Response.__init__(self, http_response) response_headers = http_response.msg # older pythons don't have getheaders metadata = self.get_aws_metadata(response_headers) self.object = S3Object(self.body, metadata) def get_aws_metadata(self, headers): metadata = {} for hkey in headers.keys(): if hkey.lower().startswith(METADATA_PREFIX): metadata[hkey[len(METADATA_PREFIX):]] = headers[hkey] del headers[hkey] return metadata class LocationResponse(Response): def __init__(self, http_response): Response.__init__(self, http_response) if http_response.status < 300: handler = LocationHandler() xml.sax.parseString(self.body, handler) self.location = handler.location class ListBucketHandler(xml.sax.ContentHandler): def __init__(self): self.entries = [] self.curr_entry = None self.curr_text = '' self.common_prefixes = [] self.curr_common_prefix = None self.name = '' self.marker = '' self.prefix = '' self.is_truncated = False self.delimiter = '' self.max_keys = 0 self.next_marker = '' self.is_echoed_prefix_set = False def startElement(self, name, attrs): if name == 'Contents': self.curr_entry = ListEntry() elif name == 'Owner': self.curr_entry.owner = Owner() elif name == 'CommonPrefixes': self.curr_common_prefix = CommonPrefixEntry() def endElement(self, name): if name == 'Contents': self.entries.append(self.curr_entry) elif name == 'CommonPrefixes': self.common_prefixes.append(self.curr_common_prefix) elif name == 'Key': self.curr_entry.key = self.curr_text elif name == 'LastModified': self.curr_entry.last_modified = self.curr_text elif name == 'ETag': self.curr_entry.etag = self.curr_text elif name == 'Size': self.curr_entry.size = int(self.curr_text) elif name == 'ID': self.curr_entry.owner.id = self.curr_text elif name == 'DisplayName': self.curr_entry.owner.display_name = self.curr_text elif name == 'StorageClass': self.curr_entry.storage_class = self.curr_text elif name == 'Name': self.name = self.curr_text elif name == 'Prefix' and self.is_echoed_prefix_set: self.curr_common_prefix.prefix = self.curr_text elif name == 'Prefix': self.prefix = self.curr_text self.is_echoed_prefix_set = True elif name == 'Marker': self.marker = self.curr_text elif name == 'IsTruncated': self.is_truncated = self.curr_text == 'true' elif name == 'Delimiter': self.delimiter = self.curr_text elif name == 'MaxKeys': self.max_keys = int(self.curr_text) elif name == 'NextMarker': self.next_marker = self.curr_text self.curr_text = '' def characters(self, content): self.curr_text 
+= content class ListAllMyBucketsHandler(xml.sax.ContentHandler): def __init__(self): self.entries = [] self.curr_entry = None self.curr_text = '' def startElement(self, name, attrs): if name == 'Bucket': self.curr_entry = Bucket() def endElement(self, name): if name == 'Name': self.curr_entry.name = self.curr_text elif name == 'CreationDate': self.curr_entry.creation_date = self.curr_text elif name == 'Bucket': self.entries.append(self.curr_entry) def characters(self, content): self.curr_text = content class LocationHandler(xml.sax.ContentHandler): def __init__(self): self.location = None self.state = 'init' def startElement(self, name, attrs): if self.state == 'init': if name == 'LocationConstraint': self.state = 'tag_location' self.location = '' else: self.state = 'bad' else: self.state = 'bad' def endElement(self, name): if self.state == 'tag_location' and name == 'LocationConstraint': self.state = 'done' else: self.state = 'bad' def characters(self, content): if self.state == 'tag_location': self.location += content PenguinTV-4.2.0/penguintv/amazon/__init__.py0000644000000000000000000000000010760322002015655 0ustar PenguinTV-4.2.0/penguintv/TagEditorNG.py0000644000000000000000000002352211072501076014765 0ustar # (C) 2007 Owen Williams # # A new, more intuitive tag editor import penguintv import gtk import utils class TagEditorNG: FEEDID = 0 TITLE = 1 TAGGED = 2 SEPARATOR = 3 NEWLY_TOGGLED = 4 def __init__(self, xml, app): self._xml = xml self._app = app self._current_tag = None self._app.connect("feed-added", self.__feed_added_cb) self._app.connect("feed-removed", self.__feed_removed_cb) self._app.connect("tags-changed", self.__tags_changed_cb) def show(self): self._window = self._xml.get_widget("dialog_tag_editor_ng") self._window.set_transient_for(self._app.main_window.get_parent()) for key in dir(self.__class__): if key[:3] == '_on': self._xml.signal_connect(key, getattr(self,key)) self._feeds_widget = self._xml.get_widget("treeview_feeds") self._feeds_model = gtk.ListStore(int, str, bool, bool, bool) #feed_id, title, tagged, separator, newly toggled self._feeds_widget.set_row_separator_func(lambda m,i:m[i][self.SEPARATOR] == True) self._sorted_model = gtk.TreeModelSort(self._feeds_model) def feed_sort_func(model, i1, i2): #use lists to not affect actual values r1 = list(model[i1]) r2 = list(model[i2]) #if either is newly selected, treat as unchecked for sorting if r1[self.NEWLY_TOGGLED] == True: r1[self.TAGGED] = not r1[self.TAGGED] if r2[self.NEWLY_TOGGLED] == True: r2[self.TAGGED] = not r2[self.TAGGED] #test separator if r1[self.SEPARATOR] == True: if r2[self.TAGGED]: return -1 else: return 1 if r2[self.SEPARATOR] == True: if r1[self.TAGGED]: return 1 else: return -1 #test checkboxes if r1[self.TAGGED] != r2[self.TAGGED]: return r1[self.TAGGED] - r2[self.TAGGED] #correct for weird bug if r1[self.TITLE] is None: r1[self.TITLE] = "" if r2[self.TITLE] is None: r2[self.TITLE] = "" #sort by name if r1[self.TITLE].upper() < r2[self.TITLE].upper(): return 1 elif r1[self.TITLE].upper() == r2[self.TITLE].upper(): return 0 return -1 self._sorted_model.set_sort_func(0, feed_sort_func) self._sorted_model.set_sort_column_id(0, gtk.SORT_DESCENDING) self._feeds_widget.set_model(self._sorted_model) renderer = gtk.CellRendererToggle() feed_column = gtk.TreeViewColumn('') feed_column.pack_start(renderer, True) self._feeds_widget.append_column(feed_column) feed_column.set_attributes(renderer, active=2) renderer.connect('toggled', self._feed_toggled) renderer = gtk.CellRendererText() feed_column = 
gtk.TreeViewColumn('Feeds') feed_column.pack_start(renderer, True) feed_column.set_attributes(renderer, markup=1) self._feeds_widget.append_column(feed_column) self._tags_widget = self._xml.get_widget("treeview_tags") tags_model = gtk.ListStore(str) #tag self._tags_widget.set_model(tags_model) renderer = gtk.CellRendererText() renderer.set_property('editable', True) renderer.connect('edited', self._tag_name_edited) tag_column = gtk.TreeViewColumn('Tags') tag_column.pack_start(renderer, True) tag_column.set_attributes(renderer, markup=0) self._tags_widget.append_column(tag_column) self._tags_widget.get_selection().connect('changed', self._tags_widget_changed) pane = self._xml.get_widget("hpaned1") pane.set_position(200) if utils.RUNNING_HILDON: self._window.resize(650,300) else: self._window.resize(500,600) self._window.show() self._populate_lists() def __feed_added_cb(self, app, a, b): self._populate_lists() def __feed_removed_cb(self, app, a): self._populate_lists() def __tags_changed_cb(self, app, val): # if we initiated the change we set val=1 # the app sets val=0 if val != 1: self._populate_lists() def _populate_lists(self): self._feeds_model.clear() for feed_id, title, url in self._app.db.get_feedlist(): self._feeds_model.append([feed_id, title, False, False, False]) self._feeds_model.append([-1, "None", False, True, False]) model = self._tags_widget.get_model() model.clear() for tag, favorite in self._app.db.get_all_tags(): model.append([tag]) def _tags_widget_changed(self, event): tags_model = self._tags_widget.get_model() selected = self._tags_widget.get_selection().get_selected() try: self._current_tag = tags_model[selected[1]][0] tagged_feeds = self._app.db.get_feeds_for_tag(self._current_tag) except: self._current_tag = None tagged_feeds = [] for row in self._feeds_model: #reset "newly selected" feeds row[self.NEWLY_TOGGLED] = False row[self.TAGGED] = row[self.FEEDID] in tagged_feeds def _feed_toggled(self, obj, path): if self._current_tag is None: return path = self._sorted_model.convert_path_to_child_path(path) row = self._feeds_model[path] row[self.TAGGED] = not row[self.TAGGED] row[self.NEWLY_TOGGLED] = not row[self.NEWLY_TOGGLED] if row[self.TAGGED]: self._app.db.add_tag_for_feed(row[self.FEEDID], self._current_tag) self._app.emit('tags-changed', 1) else: self._app.db.remove_tag_from_feed(row[self.FEEDID], self._current_tag) self._app.emit('tags-changed', 1) def _on_button_rename_clicked(self, event): if self._current_tag is None: return # pop up a dialog to rename the current tag dialog = gtk.Dialog(title=_("Rename Tag"), parent=None, flags=gtk.DIALOG_MODAL, buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT, gtk.STOCK_OK, gtk.RESPONSE_ACCEPT)) label = gtk.Label(_("Please enter a new name for this tag")) dialog.vbox.pack_start(label, True, True, 0) entry = gtk.Entry() dialog.vbox.pack_start(entry) dialog.show_all() response = dialog.run() dialog.hide() del dialog if response == gtk.RESPONSE_ACCEPT: # rename this item new_name = entry.get_text() self._rename_tag(self._current_tag, new_name) def _tag_name_edited(self, renderer, path, new_text): #FIXME: do we need to check if the new_name already exists? model = self._tags_widget.get_model() self._rename_tag(model[path][0], new_text) def _rename_tag(self, old_name, new_name): #FIXME: do we need to check if the new_name already exists? 
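# --- Editor's note: the tag editor avoids reacting to its own database
# writes by emitting 'tags-changed' with a flag of 1; __tags_changed_cb above
# repopulates only when the change came from elsewhere (val != 1).  The
# pattern in miniature (make_tags_changed_cb is illustrative only):
def make_tags_changed_cb(repopulate):
    def cb(app, val):
        if val != 1:             # 1 == "this editor made the change itself"
            repopulate()
    return cb
# --- end editor's note ---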
self._app.db.rename_tag(old_name, new_name) self._app.emit('tags-changed', 1) # resort selection = self._tags_widget.get_selection() model, old_iter = selection.get_selected() model.remove(old_iter) self._current_tag = new_name new_index = -1 i = -1 for row in model: i += 1 if new_name.upper() < row[0].upper(): new_index = i break if new_index == -1: new_index = len(model) - 1 model.insert(new_index,[new_name]) new_iter = model.get_iter((new_index,)) self._tags_widget.scroll_to_cell((new_index,)) selection.select_path((new_index,)) def _on_button_add_clicked(self, event): # pop up a dialog to ask for a name, and add it... how to deal with # a tag with no associated feed???? ... I don't think it even matters # because the tag will be "created" as soon as we check a box # pop up a dialog to rename the current tag dialog = gtk.Dialog(title=_("Rename Tag"), parent=None, flags=gtk.DIALOG_MODAL, buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT, gtk.STOCK_OK, gtk.RESPONSE_ACCEPT)) label = gtk.Label(_("Please enter a name for this new tag:")) dialog.vbox.pack_start(label, True, True, 0) entry = gtk.Entry() dialog.vbox.pack_start(entry) dialog.show_all() response = dialog.run() dialog.hide() del dialog if response == gtk.RESPONSE_ACCEPT: # rename this item tag_name = entry.get_text() # add tag to our list model = self._tags_widget.get_model() selection = self._tags_widget.get_selection() self._current_tag = tag_name new_index = -1 i = -1 for row in model: i += 1 if tag_name.upper() < row[0].upper(): new_index = i break if new_index == -1: new_index = len(model) - 1 model.insert(new_index,[tag_name]) # select it new_iter = model.get_iter((new_index,)) self._tags_widget.scroll_to_cell((new_index,)) selection.select_iter(new_iter) def _on_button_remove_clicked(self, event): if self._current_tag is None: return dialog = gtk.Dialog(title=_("Really Delete Tag?"), parent=None, flags=gtk.DIALOG_MODAL, buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT, gtk.STOCK_OK, gtk.RESPONSE_ACCEPT)) label = gtk.Label(_("Are you sure you want to remove this tag from all feeds?")) dialog.vbox.pack_start(label, True, True, 0) label.show() response = dialog.run() dialog.hide() del dialog if response == gtk.RESPONSE_ACCEPT: #remove from db self._app.db.remove_tag(self._current_tag) self._app.emit('tags-changed', 1) #remove tag from our list selection = self._tags_widget.get_selection() model, old_iter = selection.get_selected() model.remove(old_iter) self._current_tag = None #select nothing self._tags_widget.scroll_to_cell((0,)) selection.unselect_all() def _on_button_close_clicked(self, event): self.hide() def _on_editor_help_button_activate(self, event): dialog = gtk.Dialog(title=_("Tag Editor Help"), parent=None, flags=gtk.DIALOG_MODAL, buttons=(gtk.STOCK_OK, gtk.RESPONSE_ACCEPT)) hbox = gtk.HBox() hbox.set_spacing(12) image = gtk.image_new_from_stock(gtk.STOCK_DIALOG_INFO, gtk.ICON_SIZE_DIALOG) hbox.pack_start(image, False, False, 12) label = gtk.Label(_("""Select a tag in the list on the left, and all the feeds with that tag will be marked on the right. You may mark and unmark feeds to add or remove that tag from them. 
Tagged feeds will appear at the top of the list."""))
        label.set_line_wrap(True)
        hbox.pack_start(label, True, True, 0)
        dialog.vbox.pack_start(hbox, True, True, 0)
        dialog.show_all()
        dialog.resize(400, -1)
        response = dialog.run()
        dialog.hide()
        del dialog

    def on_dialog_tag_editor_ng_destroy_event(self, data1, data2):
        self.hide()

    def on_dialog_tag_editor_ng_delete_event(self, data1, data2):
        return self._window.hide_on_delete()

    def hide(self):
        self._window.hide()
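# --- Editor's note: like the tag editor above, PenguinTV's reusable dialogs
# survive the window manager's close button by answering delete-event with
# hide_on_delete(), which hides the window and returns True so GTK does not
# destroy it.  The idiom on a bare PyGTK window:
import gtk
win = gtk.Window()
win.connect('delete-event', lambda w, e: w.hide_on_delete())
# a later win.show() re-uses the same, still-alive window
# --- end editor's note ---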