# streamtuner2/action.py
#
# encoding: UTF-8
# api: streamtuner2
# type: functions
# title: play/record actions
# description: Starts audio applications, guesses MIME types for URLs
#
#
# Multimedia interface for starting audio players or browser.
#
#
# Each channel plugin has a .listtype which describes the linked
# audio playlist format. It's mostly audio/x-scpls, occasionally m3u,
# but sometimes url/direct if the entry[url] leads directly to the
# streaming server.
# As a fallback there is a regex which just looks for URLs in the
# given resource (works for m3u/pls/xspf/asx/...). There is no
# actual URL "filename" extension guessing.
#
#
#
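#
# A small illustration (not part of the original module) of the fallback
# behaviour described above: when no listtype-specific parser applies, a
# plain regex scrape over the playlist text is enough to pull out the
# station URLs. The playlist content below is made up for the example.
def _example_fallback_url_scrape():
    playlist_text = "#EXTM3U\n#EXTINF:-1,Example\nhttp://stream.example.org:8000/live\n"
    return re.findall("(http://[^\s]+)", playlist_text)   # -> ["http://stream.example.org:8000/live"]
#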
import re
import os
import http
from xml.sax.saxutils import escape as xmlentities   # used by the XML playlist writers in save()
from config import conf
import platform
#from channels import __print__
def __print__(*args):
if conf.debug:
print " ".join([str(a) for a in args])
main = None
#-- media actions ---------------------------------------------
#
# implements "play" and "record" methods,
# but also "browser" for web URLs
#
class action:
# streamlink formats
lt = {"asx":"video/x-ms-asf", "pls":"audio/x-scpls", "m3u":"audio/x-mpegurl", "xspf":"application/xspf+xml", "href":"url/http", "ram":"audio/x-pn-realaudio", "smil":"application/smil"}
# media formats
mf = {"mp3":"audio/mp3", "ogg":"audio/ogg", "aac":"audio/aac"}
# web
@staticmethod
def browser(url):
__print__( conf.browser )
os.system(conf.browser + " '" + action.quote(url) + "' &")
# os shell cmd escaping
@staticmethod
def quote(s):
return "%r" % s
# calls player for stream url and format
@staticmethod
def play(url, audioformat="audio/mp3", listformat="text/x-href"):
if (url):
url = action.url(url, listformat)
if (audioformat):
if audioformat == "audio/mpeg":
audioformat = "audio/mp3" # internally we use the more user-friendly moniker
cmd = conf.play.get(audioformat, conf.play.get("*/*", "vlc %u"))
__print__( "play", url, cmd )
try:
action.run( action.interpol(cmd, url) )
except:
pass
@staticmethod
def run(cmd):
__print__( cmd )
print cmd
os.system(cmd + (" &" if platform.system()!="Windows" else ""))
# streamripper
@staticmethod
def record(url, audioformat="audio/mp3", listformat="text/x-href", append="", row={}):
__print__( "record", url )
cmd = conf.record.get(audioformat, conf.record.get("*/*", None))
try: action.run( action.interpol(cmd, url, row) + append )
except: pass
# save as .m3u
@staticmethod
def save(row, fn, listformat="audio/x-scpls"):
# modify stream url
row["url"] = action.url(row["url"], listformat)
stream_urls = action.extract_urls(row["url"], listformat)
# output format
if (re.search("\.m3u", fn)):
txt = "#M3U\n"
for url in stream_urls:
txt += http.fix_url(url) + "\n"
# output format
elif (re.search("\.pls", fn)):
txt = "[playlist]\n" + "numberofentries=1\n"
for i,u in enumerate(stream_urls):
i = str(i + 1)
txt += "File"+i + "=" + u + "\n"
txt += "Title"+i + "=" + row["title"] + "\n"
txt += "Length"+i + "=-1\n"
txt += "Version=2\n"
# output format
elif (re.search("\.xspf", fn)):
txt = '<?xml version="1.0" encoding="UTF-8"?>' + "\n"
txt += '<?http header="Content-Type: application/xspf+xml" ?>' + "\n"
txt += '<playlist version="1" xmlns="http://xspf.org/ns/0/">' + "\n"
for attr,tag in [("title","title"), ("homepage","info"), ("playing","annotation"), ("description","annotation")]:
    if row.get(attr):
        txt += "  <" + tag + ">" + xmlentities(row[attr]) + "</" + tag + ">\n"
txt += "  <trackList>\n"
for u in stream_urls:
    txt += '    <track><location>' + xmlentities(u) + '</location></track>' + "\n"
txt += "  </trackList>\n</playlist>\n"
# output format
elif (re.search("\.json", fn)):
row["stream_urls"] = stream_urls
txt = str(row) # pseudo-json (python format)
# output format
elif (re.search("\.asx", fn)):
txt = "\n" \
+ " " + xmlentities(row["title"]) + "\n" \
+ " \n" \
+ " " + xmlentities(row["title"]) + "\n" \
+ " \n" \
+ " \n" \
+ " \n\n"
# output format
elif (re.search("\.smil", fn)):
txt = "\n\n \n\n" \
+ "\n \n \n \n\n\n"
# unknown
else:
txt = ""
# write
if txt:
f = open(fn, "wb")
f.write(txt)
f.close()
pass
# replaces %u/%l/%r/%url/%pls with the stream url, %g/%f/%m/%m3u with a local .m3u filename, and %d/%s/%srv with the direct server address
@staticmethod
def interpol(cmd, url, row={}):
# inject other meta fields
if row:
for field in row:
cmd = cmd.replace("%"+field, "%r" % row.get(field))
# add default if cmd has no %url placeholder
if cmd.find("%") < 0:
cmd = cmd + " %m3u"
# standard placeholders
if (re.search("%(url|pls|[ulr])", cmd)):
cmd = re.sub("%(url|pls|[ulr])", action.quote(url), cmd)
if (re.search("%(m3u|[fgm])", cmd)):
cmd = re.sub("%(m3u|[fgm])", action.quote(action.m3u(url)), cmd)
if (re.search("%(srv|[ds])", cmd)):
cmd = re.sub("%(srv|[ds])", action.quote(action.srv(url)), cmd)
return cmd
# transforms internal URN/IRI into a real URL where necessary
@staticmethod
def url(url, listformat):
if (listformat == "audio/x-scpls"):
url = url
elif (listformat == "text/x-urn-streamtuner2-script"):
url = main.special.stream_url(url)
else:
url = url
return url
# download a .pls resource and extract urls
@staticmethod
def pls(url):
text = http.get(url)
__print__( "pls_text=", text )
return re.findall("\s*File\d*\s*=\s*(\w+://[^\s]+)", text, re.I)
# currently misses out on the titles
# get a single direct ICY stream url (extract either from PLS or M3U)
@staticmethod
def srv(url):
return action.extract_urls(url)[0]
# retrieve real stream urls from .pls or .m3u links
@staticmethod
def extract_urls(pls, listformat="__not_used_yet__"):
    # extract stream address from .pls URL
    if (re.search("\.pls", pls)):      # audio/x-scpls
        return action.pls(pls)
    elif (re.search("\.asx", pls)):    # video/x-ms-asf
        return re.findall("<Ref\s+href=\"(http://[^\"]+)\"", http.get(pls), re.I)
    else:                              # .m3u or plain lists, just look for URLs
        return re.findall("(http://[^\s]+)", http.get(pls))
# generate a temp filename for the local .m3u, with a unique stream id if possible
@staticmethod
def tmp_fn(pls):
    # use shoutcast-style numeric stream id if available
    stream_id = re.search("http://.+?/.*?(\d+)", pls, re.M)
    stream_id = stream_id and stream_id.group(1) or "XXXXXX"
    try:
        channelname = main.current_channel
    except:
        channelname = "unknown"
    return (conf.tmp + os.sep + "streamtuner2." + channelname + "." + stream_id + ".m3u",
            len(stream_id) > 3 and stream_id != "XXXXXX")
# check if there are any urls in a given file
@staticmethod
def has_urls(tmp_fn):
if os.path.exists(tmp_fn):
return open(tmp_fn, "r").read().find("http://") >= 0
# create a local .m3u file from it
@staticmethod
def m3u(pls):
# temp filename
(tmp_fn, unique) = action.tmp_fn(pls)
# does it already exist?
if tmp_fn and unique and conf.reuse_m3u and action.has_urls(tmp_fn):
return tmp_fn
# download PLS
__print__( "pls=",pls )
url_list = action.extract_urls(pls)
__print__( "urls=", url_list )
# output URL list to temporary .m3u file
if (len(url_list)):
#tmp_fn =
f = open(tmp_fn, "w")
f.write("#M3U\n")
f.write("\n".join(url_list) + "\n")
f.close()
# return path/name of temporary file
return tmp_fn
else:
__print__( "error, there were no URLs in ", pls )
raise "Empty PLS"
# open help browser
@staticmethod
def help(*args):
os.system("yelp /usr/share/doc/streamtuner2/help/ &")
#or action.browser("/usr/share/doc/streamtuner2/")
#class action
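# --- usage sketch (illustration only, not part of the original file) -------
# How the placeholder substitution in action.interpol() plays together with
# the conf.play command templates: %u/%url/%pls carry the playlist URL
# verbatim, while %m3u/%g/%f/%m and %srv/%d/%s would first download and
# convert the playlist, so they are avoided here. The URL is hypothetical.
def _example_interpol():
    cmd = action.interpol("audacious %u", "http://example.org/stream.pls")
    # -> audacious 'http://example.org/stream.pls'
    return cmd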
# streamtuner2/cli.py
#
# api: streamtuner2
# title: CLI interface
# description: allows calling streamtuner2 from the command line
# status: experimental
# version: 0.3
#
# Returns JSON data when queried. Usually returns cache data, but the
# "category" module always loads fresh info from the directory servers.
#
# Not all channel plugins are gtk-free yet, and some workarounds are
# used here to keep channel plugins from complaining about a missing parent window.
#
#
#
import sys
#from channels import *
import http
import action
from config import conf
import json
# CLI
class StreamTunerCLI (object):
# plugin info
title = "CLI interface"
version = 0.3
# channel plugins
channel_modules = ["shoutcast", "xiph", "internet_radio_org_uk", "jamendo", "myoggradio", "live365"]
current_channel = "cli"
plugins = {} # only populated sparsely by .stream()
# start
def __init__(self):
# fake init
action.action.main = empty_parent()
action.action.main.current_channel = self.current_channel
# check if enough arguments, else help
if len(sys.argv)<3:
a = self.help
# first cmdline arg == action
else:
command = sys.argv[1]
a = self.__getattribute__(command)
# run
result = a(*sys.argv[2:])
if result:
self.json(result)
# show help
def help(self, *args):
print """
syntax: streamtuner2 action [channel] "stream title"
from cache:
streamtuner2 stream shoutcast frequence
streamtuner2 dump xiph
streamtuner2 play "..."
streamtuner2 url "..."
load fresh:
streamtuner2 category shoutcast "Top 40"
streamtuner2 categories xiph
"""
# prints stream data from cache
def stream(self, *args):
# optional channel name, title
if len(args) > 1:
(channel_list, title) = args
channel_list = channel_list.split(",")
else:
title = list(args).pop()
channel_list = self.channel_modules
# walk through channel plugins, categories, rows
title = title.lower()
for channel in channel_list:
self.current_channel = channel
c = self.channel(channel)
self.plugins[channel] = c
c.cache()
for cat in c.streams:
for row in c.streams[cat]:
if row and row.get("title","").lower().find(title)>=0:
return(row)
# just get url
def url(self, *args):
row = self.stream(*args)
if row.get("url"):
print row["url"]
# run player
def play(self, *args):
row = self.stream(*args)
if row.get("url"):
#action.action.play(row["url"], audioformat=row.get("format","audio/mp3"))
self.plugins[self.current_channel].play(row)
# return cache data 1:1
def dump(self, channel):
c = self.channel(channel)
c.cache()
return c.streams
# load from server
def category(self, module, cat):
c = self.channel(module)
r = c.update_streams(cat)
[c.postprocess(row) for row in r]
return r
# load from server
def categories(self, module):
c = self.channel(module)
c.cache()
r = c.update_categories()
if not r:
r = c.categories
if c.__dict__.get("empty"):
del r[0]
return r
# load module
def channel(self, module):
plugin = __import__("channels."+module, None, None, [""])
plugin_class = plugin.__dict__[module]
p = plugin_class(None)
p.parent = empty_parent()
return p
# load all channel modules
def channels(self, channels=None):
if channels:
channels = channels.split(",")
else:
channels = self.channel_modules
return (self.channel(module) for module in channels)
# pretty print json
def json(self, dat):
print json.dumps(dat, sort_keys=True, indent=2)
# trap for some main window calls
class empty_parent (object):
channel = {}
null = lambda *a: None
status = null
thread = null
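# --- usage sketch (illustration only, not part of the original file) -------
# StreamTunerCLI simply maps the first command line argument onto one of its
# methods via __getattribute__. The same dispatch can be exercised directly;
# "help" is used here because it needs no channel plugins or cache data.
def _example_cli_dispatch(command="help"):
    cli = StreamTunerCLI.__new__(StreamTunerCLI)   # skip __init__ / sys.argv handling
    cli.plugins = {}
    a = cli.__getattribute__(command)
    return a()                                     # help() prints the syntax summary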
# streamtuner2/config.py
#
# encoding: UTF-8
# api: streamtuner2
# type: class
# title: global config object
# description: reads ~/.config/streamtuner2/*.json files
#
# In the main application or module files which need access
# to a global conf object, just import this module as follows:
#
# from config import conf
#
# Here conf is already an instance of the underlying
# ConfigDict class.
#
import os
import sys
import pson
import gzip
#-- create a single instance of config object
conf = object()
#-- global configuration data ---------------------------------------------
class ConfigDict(dict):
# start
def __init__(self):
# object==dict means conf.var is conf["var"]
self.__dict__ = self # let's pray this won't leak memory due to recursion issues
# prepare
self.defaults()
self.xdg()
# runtime
dirs = ["/usr/share/streamtuner2", "/usr/local/share/streamtuner2", sys.path[0], "."]
self.share = [d for d in dirs if os.path.exists(d)][0]
# settings from last session
last = self.load("settings")
if (last):
self.update(last)
# store defaults in file
else:
self.save("settings")
self.firstrun = 1
# some defaults
def defaults(self):
self.browser = "sensible-browser"
self.play = {
"audio/mp3": "audacious ", # %u for url to .pls, %g for downloaded .m3u
"audio/ogg": "audacious ",
"audio/aac": "amarok -l ",
"audio/x-pn-realaudio": "vlc ",
"audio/*": "totem ",
"*/*": "vlc %srv",
}
self.record = {
"*/*": "x-terminal-emulator -e streamripper %srv",
}
self.plugins = {
"bookmarks": 1, # built-in plugins, cannot be disabled
"shoutcast": 1,
"punkcast": 0, # disable per default
}
self.tmp = os.environ.get("TEMP", "/tmp")
self.max_streams = "120"
self.show_bookmarks = 1
self.show_favicons = 1
self.load_favicon = 1
self.heuristic_bookmark_update = 1
self.retain_deleted = 1
self.auto_save_appstate = 1
self.theme = "" #"MountainDew"
self.debug = False
self.channel_order = "shoutcast, xiph, internet_radio_org_uk, jamendo, myoggradio, .."
self.reuse_m3u = 1
self.google_homepage = 1
# each plugin has a .config dict list, we add defaults here
def add_plugin_defaults(self, config, module=""):
# options
for opt in config:
if ("name" in opt) and ("value" in opt) and (opt["name"] not in vars(self)):
self.__dict__[opt["name"]] = opt["value"]
# plugin state
if module and module not in conf.plugins:
conf.plugins[module] = 1
# http://standards.freedesktop.org/basedir-spec/basedir-spec-0.6.html
def xdg(self):
home = os.environ.get("HOME", self.tmp)
config = os.environ.get("XDG_CONFIG_HOME", home+"/.config")
# storage dir
self.dir = config + "/" + "streamtuner2"
# create if necessary
if (not os.path.exists(self.dir)):
os.makedirs(self.dir)
# store some configuration list/dict into a file
def save(self, name="settings", data=None, gz=0, nice=0):
name = name + ".json"
if (data == None):
data = dict(self.__dict__) # ANOTHER WORKAROUND: typecast to plain dict(), else json filter_data sees it as object and str()s it
nice = 1
# check for subdir
if (name.find("/") > 0):
subdir = name[0:name.find("/")]
subdir = self.dir + "/" + subdir
if (not os.path.exists(subdir)):
os.mkdir(subdir)
open(subdir+"/.nobackup", "w").close()
# write
file = self.dir + "/" + name
# .gz or normal file
if gz:
f = gzip.open(file+".gz", "w")
if os.path.exists(file):
os.unlink(file)
else:
f = open(file, "w")
# encode
pson.dump(data, f, indent=(4 if nice else None))
f.close()
# retrieve data from config file
def load(self, name):
name = name + ".json"
file = self.dir + "/" + name
try:
# .gz or normal file
if os.path.exists(file + ".gz"):
f = gzip.open(file + ".gz", "r")
elif os.path.exists(file):
f = open(file, "r")
else:
return # file not found
# decode
r = pson.load(f)
f.close()
return r
except (Exception), e:
print "PSON parsing error (in "+name+")", e
# recursive dict update
def update(self, with_new_data):
for key,value in with_new_data.iteritems():
if type(value) == dict:
self[key].update(value)
else:
self[key] = value
# descends into sub-dicts instead of wiping them with subkeys
#-- actually fill global conf instance
conf = ConfigDict()
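# --- usage sketch (illustration only, not part of the original file) -------
# Two typical interactions with the conf object: a plugin registering its own
# defaults via add_plugin_defaults(), and the recursive update() which merges
# sub-dicts instead of replacing them. Option and plugin names are made up.
def _example_conf_usage():
    conf.add_plugin_defaults(
        [{"name": "example_option", "value": 42, "type": "int"}],
        module="example_plugin",
    )
    conf.update({"play": {"audio/ogg": "mplayer %srv"}})   # keeps the other play entries
    return conf.example_option, conf.plugins["example_plugin"], sorted(conf.play.keys())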
# streamtuner2/favicon.py
#
# encoding: utf-8
# api: python
# title: favicon download
# description: retrieves favicons for station homepages, plus utility code for display preparation
# config:
#
#
#
# type: module
#
#
# This module fetches favicon.ico files and prepares .png images for each domain
# in the stations list. Homepage URLs are used for this.
#
# Files end up in:
# /home/user/.config/streamtuner2/icons/www.example.org.png
#
# Currently relies on Google conversion service, because urllib+PIL conversion
# method is still flaky, and a bit slower. Future version might use imagemagick.
#
always_google = 1 # use favicon service for speed
only_google = 1 # don't fall back to the other/slower methods if that fails
delete_google_stub = 1 # don't keep placeholder images
google_placeholder_filesizes = (726,896)
import os, os.path
import urllib
import re
import urlparse
from config import conf
try: from processing import Process as Thread
except: from threading import Thread
import http
# ensure that we don't try to download a single favicon twice per session,
# if it's not available the first time, we won't get it after switching stations back and forth
tried_urls = []
# walk through entries
def download_all(entries):
t = Thread(target= download_thread, args= ([entries]))
t.start()
def download_thread(entries):
for e in entries:
# try just once
if e.get("homepage") in tried_urls:
pass
# retrieve specific img url as favicon
elif e.get("img"):
pass
# favicon from homepage URL
elif e.get("homepage"):
download(e["homepage"])
# remember
tried_urls.append(e.get("homepage"))
pass
# download a single favicon for currently playing station
def download_playing(row):
if conf.google_homepage and not row.get("homepage"):
google_find_homepage(row)
if conf.load_favicon and row.get("homepage"):
download_all([row])
pass
#--- unrelated ---
def google_find_homepage(row):
""" Searches for missing homepage URL via Google. """
if row.get("url") not in tried_urls:
tried_urls.append(row.get("url"))
rx_t = re.compile('^(([^-:]+.?){1,2})')
rx_u = re.compile('"(http://[^"]+)" class=l')
# extract first title parts
title = rx_t.search(row["title"])
if title:
title = title.group(0).replace(" ", "%20")
# do a google search
html = http.ajax("http://www.google.de/search?hl=de&q="+title, None)
# find first URL hit
url = rx_u.search(html)
if url:
row["homepage"] = http.fix_url(url.group(1))
pass
#-----------------
# extract domain name
def domain(url):
if url.startswith("http://"):
return url[7:url.find("/", 8)] # we assume our URLs are fixed already (http://example.org/ WITH trailing slash!)
else:
return "null"
# local filename
def name(url):
return domain(url) + ".png"
# local filename
def file(url):
icon_dir = conf.dir + "/icons"
if not os.path.exists(icon_dir):
os.mkdir(icon_dir)
open(icon_dir+"/.nobackup", "w").close()
return icon_dir + "/" + name(url)
# does the favicon exist
def available(url):
return os.path.exists(file(url))
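# --- usage sketch (illustration only, not part of the original file) -------
# The mapping from a station homepage to its cached icon: domain() expects an
# already fixed URL (http:// scheme and trailing slash), name() appends .png,
# and file() places it under conf.dir/icons/. The homepage URL is hypothetical.
def _example_icon_name():
    return name("http://www.example.org/")   # -> "www.example.org.png"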
# download favicon for given URL
def download(url):
# skip if .png for domain already exists
if available(url):
return
# fastest method, so default to google for now
if always_google:
google_ico2png(url)
if available(url) or only_google:
return
try: # look for /favicon.ico first
#print "favicon.ico"
direct_download("http://"+domain(url)+"/favicon.ico", file(url))
except:
try: # extract favicon filename from website
#print "html "
html_download(url)
except: # fallback
#print "google ico2png"
google_ico2png(url)
# retrieve PNG via Google ico2png
def google_ico2png(url):
#try:
GOOGLE = "http://www.google.com/s2/favicons?domain="
(fn, headers) = urllib.urlretrieve(GOOGLE+domain(url), file(url))
# test for stub image
if delete_google_stub and (filesize(fn) in google_placeholder_filesizes):
os.remove(fn)
def filesize(fn):
return os.stat(fn).st_size
# mime magic
def filetype(fn):
f = open(fn, "rb")
bin = f.read(4)
f.close()
if bin[1:4] == "PNG":
return "image/png"
else:
return "*/*"
# favicon.ico
def direct_download(favicon, fn):
# try:
# URL download
r = urllib.urlopen(favicon)
headers = r.info()
# abort on HTTP errors or non-image content
if r.getcode() >= 300:
    raise IOError("HTTP error %s" % r.getcode())
if headers["Content-Type"].lower().find("image/") < 0:
    raise ValueError("can't use text/* content")
# save file
fn_tmp = fn+".tmp"
f = open(fn_tmp, "wb")
f.write(r.read(32768))
f.close()
# check type
if headers["Content-Type"].lower()=="image/png" and favicon.find(".png") and filetype(fn)=="image/png":
pngresize(fn_tmp)
os.mv(fn_tmp, fn)
else:
ico2png(fn_tmp, fn)
os.remove(fn_tmp)
# except:
# "File not found" and False
# peek at URL, download favicon.ico
def html_download(url):
#
#try:
# download html, look for a <link rel="shortcut icon" href=...> tag
r = urllib.urlopen(url)
html = r.read(4096)
r.close()
rx = re.compile("""]+rel\s*=\s*"?\s*(?:shortcut\s+|fav)?icon[^<>]+href=["'](?P[^<>"']+)["'<>\s].""")
favicon = "".join(rx.findall(html))
# already a full url
if favicon.startswith("http://"):
None
# just /pathname
else:
favicon = urlparse.urljoin(url, favicon)
#favicon = "http://" + domain(url) + "/" + favicon
# download
direct_download(favicon, file(url))
#
# title: workaround for PIL.Image to preserve the transparency for .ico import
#
# http://stackoverflow.com/questions/987916/how-to-determine-the-transparent-color-index-of-ico-image-with-pil
# http://djangosnippets.org/snippets/1287/
#
# Author: dc
# Posted: January 17, 2009
# Language: Python
# Django Version: 1.0
# Tags: pil image ico
# Score: 2 (after 2 ratings)
#
import operator
import struct
try:
from PIL import BmpImagePlugin, PngImagePlugin, Image
except Exception, e:
print "no PIL", e
always_google = 1
only_google = 1
def load_icon(file, index=None):
'''
Load Windows ICO image.
See http://en.wikipedia.org/w/index.php?oldid=264332061 for file format
description.
'''
if isinstance(file, basestring):
file = open(file, 'rb')
try:
header = struct.unpack('<3H', file.read(6))
except:
raise IOError('Not an ICO file')
# Check magic
if header[:2] != (0, 1):
raise IOError('Not an ICO file')
# Collect icon directories
directories = []
for i in xrange(header[2]):
directory = list(struct.unpack('<4B2H2I', file.read(16)))
for j in xrange(3):
if not directory[j]:
directory[j] = 256
directories.append(directory)
if index is None:
# Select best icon
directory = max(directories, key=operator.itemgetter(slice(0, 3)))
else:
directory = directories[index]
# Seek to the bitmap data
file.seek(directory[7])
prefix = file.read(16)
file.seek(-16, 1)
if PngImagePlugin._accept(prefix):
# Windows Vista icon with PNG inside
image = PngImagePlugin.PngImageFile(file)
else:
# Load XOR bitmap
image = BmpImagePlugin.DibImageFile(file)
if image.mode == 'RGBA':
# Windows XP 32-bit color depth icon without AND bitmap
pass
else:
# Patch up the bitmap height
image.size = image.size[0], image.size[1] >> 1
d, e, o, a = image.tile[0]
image.tile[0] = d, (0, 0) + image.size, o, a
# Calculate AND bitmap dimensions. See
# http://en.wikipedia.org/w/index.php?oldid=264236948#Pixel_storage
# for description
offset = o + a[1] * image.size[1]
stride = ((image.size[0] + 31) >> 5) << 2
size = stride * image.size[1]
# Load AND bitmap
file.seek(offset)
string = file.read(size)
mask = Image.fromstring('1', image.size, string, 'raw',
('1;I', stride, -1))
image = image.convert('RGBA')
image.putalpha(mask)
return image
# convert .ico file to .png format
def ico2png(ico, png_fn):
#print "ico2png", ico, png, image
try: # .ico
image = load_icon(ico, None)
except: # automatic img file type guessing
image = Image.open(ico)
# resize
if image.size[0] > 16:
image = image.resize((16, 16), Image.ANTIALIAS)
# .png format
image.save(png_fn, "PNG", quality=98)
# resize an image
def pngresize(fn, x=16, y=16):
image = Image.open(fn)
if image.size[0] > x:
image = image.resize((x, y), Image.ANTIALIAS)
image.save(fn, "PNG", quality=98)
#-- test
if __name__ == "__main__":
import sys
download(sys.argv[1])
# streamtuner2/http.py
#
# encoding: UTF-8
# api: streamtuner2
# type: functions
# title: http download / methods
# description: http utility
# version: 1.3
#
# Provides a http GET method with gtk.statusbar() callback.
# And a function to add trailing slashes to http URLs.
#
# The latter code is pretty much unreadable. But let's put the
# blame on urllib2, the most braindamaged code in the Python
# standard library.
#
import urllib2
from urllib import urlencode
import config
from channels import __print__
#-- url download ---------------------------------------------
#-- chains to progress meter and status bar in main window
feedback = None
# sets either text or percentage, so may take two parameters
def progress_feedback(*args):
# use reset values if none given
if not args:
args = ["", 1.0]
# send to main win
if feedback:
try: [feedback(d) for d in args]
except: pass
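# --- usage sketch (illustration only, not part of the original file) -------
# The main window normally assigns its statusbar/progressbar updater to the
# module-level `feedback` hook; any callable that accepts either a text or a
# float works. A minimal stand-in for testing outside the GUI:
def _example_feedback_hook():
    def cb(v):
        print "status:", v
    global feedback
    feedback = cb
    progress_feedback("downloading...", 0.25)   # text for the statusbar, float for the meter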
#-- GET
def get(url, maxsize=1<<19, feedback="old"):
__print__("GET", url)
# statusbar info
progress_feedback(url, 0.0)
# read
content = ""
f = urllib2.urlopen(url)
max = 222000 # mostly it's 200K, but we don't get any real information
read_size = 1
# multiple steps
while (read_size and len(content) < maxsize):
# partial read
add = f.read(8192)
content = content + add
read_size = len(add)
# set progress meter
progress_feedback(float(len(content)) / float(max))
# done
# clean statusbar
progress_feedback()
# fin
__print__(len(content))
return content
#-- fix invalid URLs
def fix_url(url):
if url is None:
url = ""
if len(url):
# remove whitespace
url = url.strip()
# add scheme
if (url.find("://") < 0):
url = "http://" + url
# add mandatory path
if (url.find("/", 10) < 0):
url = url + "/"
return url
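# --- usage sketch (illustration only, not part of the original file) -------
# fix_url() supplies the missing scheme and the mandatory trailing slash that
# other modules (e.g. favicon's domain extraction) rely on:
def _example_fix_url():
    return fix_url(" example.org")   # -> "http://example.org/"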
# default HTTP headers for AJAX/POST request
default_headers = {
"User-Agent": "streamtuner2/0.4 (X11; U; Linux AMD64; en; rv:1.5.0.1) like WinAmp/2.1 but not like Googlebot/2.1", #"Mozilla/5.0 (X11; U; Linux x86_64; de; rv:1.9.2.6) Gecko/20100628 Ubuntu/10.04 (lucid) Firefox/3.6.6",
"Accept": "*/*;q=0.5, audio/*, url/*",
"Accept-Language": "en-US,en,de,es,fr,it,*;q=0.1",
"Accept-Encoding": "gzip,deflate",
"Accept-Charset": "ISO-8859-1,utf-8;q=0.7,*;q=0.1",
"Keep-Alive": "115",
"Connection": "keep-alive",
#"Content-Length", "56",
#"Cookie": "s_pers=%20s_getnr%3D1278607170446-Repeat%7C1341679170446%3B%20s_nrgvo%3DRepeat%7C1341679170447%3B; s_sess=%20s_cc%3Dtrue%3B%20s_sq%3Daolshtcst%252Caolsvc%253D%252526pid%25253Dsht%25252520%2525253A%25252520SHOUTcast%25252520Radio%25252520%2525257C%25252520Search%25252520Results%252526pidt%25253D1%252526oid%25253Dfunctiononclick%25252528event%25252529%2525257BshowMoreGenre%25252528%25252529%2525253B%2525257D%252526oidt%25253D2%252526ot%25253DDIV%3B; aolDemoChecked=1.849061",
"Pragma": "no-cache",
"Cache-Control": "no-cache",
}
# simulate ajax calls
def ajax(url, post, referer=""):
# request
headers = default_headers
headers.update({
"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
"X-Requested-With": "XMLHttpRequest",
"Referer": (referer if referer else url),
})
if type(post) == dict:
post = urlencode(post)
request = urllib2.Request(url, post, headers)
# open url
__print__( vars(request) )
progress_feedback(url, 0.2)
r = urllib2.urlopen(request)
# get data
__print__( r.info() )
progress_feedback(0.5)
data = r.read()
progress_feedback()
return data
# http://techknack.net/python-urllib2-handlers/
from gzip import GzipFile
from StringIO import StringIO
class ContentEncodingProcessor(urllib2.BaseHandler):
"""A handler to add gzip capabilities to urllib2 requests """
# add headers to requests
def http_request(self, req):
req.add_header("Accept-Encoding", "gzip, deflate")
return req
# decode
def http_response(self, req, resp):
old_resp = resp
# gzip
if resp.headers.get("content-encoding") == "gzip":
gz = GzipFile(
fileobj=StringIO(resp.read()),
mode="r"
)
resp = urllib2.addinfourl(gz, old_resp.headers, old_resp.url, old_resp.code)
resp.msg = old_resp.msg
# deflate
if resp.headers.get("content-encoding") == "deflate":
gz = StringIO( deflate(resp.read()) )
resp = urllib2.addinfourl(gz, old_resp.headers, old_resp.url, old_resp.code) # 'class to add info() and geturl() methods to an open file.'
resp.msg = old_resp.msg
return resp
# deflate support
import zlib
def deflate(data): # zlib only provides the zlib compress format, not the deflate format;
try: # so on top of all there's this workaround:
return zlib.decompress(data, -zlib.MAX_WBITS)
except zlib.error:
return zlib.decompress(data)
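# --- usage sketch (illustration only, not part of the original file) -------
# Why the two-step decompress above: servers labelling their payload "deflate"
# may send either a raw deflate stream or a zlib-wrapped one. Round-tripping a
# zlib-compressed string exercises the fallback path:
def _example_deflate():
    return deflate(zlib.compress("hello streamtuner2"))   # -> "hello streamtuner2"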
#-- init for later use
if urllib2:
# config 1
handlers = [None, None, None]
# base
handlers[0] = urllib2.HTTPHandler()
if config.conf.debug:
handlers[0].set_http_debuglevel(3)
# content-encoding
handlers[1] = ContentEncodingProcessor()
# store cookies at runtime
import cookielib
cj = cookielib.CookieJar()
handlers[2] = urllib2.HTTPCookieProcessor( cj )
# inject into urllib2
urllib2.install_opener( urllib2.build_opener(*handlers) )
# alternative function names
AJAX=ajax
POST=ajax
GET=get
URL=fix_url
# streamtuner2/kronos.py
"""Module that provides a cron-like task scheduler.
This task scheduler is designed to be used from inside your own program.
You can schedule Python functions to be called at specific intervals or
days. It uses the standard 'sched' module for the actual task scheduling,
but provides much more:
* repeated tasks (at intervals, or on specific days)
* error handling (exceptions in tasks don't kill the scheduler)
* optional to run scheduler in its own thread or separate process
* optional to run a task in its own thread or separate process
If the threading module is available, you can use the various Threaded
variants of the scheduler and associated tasks. If threading is not
available, you could still use the forked variants. If fork is also
not available, all processing is done in a single process, sequentially.
There are three Scheduler classes:
Scheduler ThreadedScheduler ForkedScheduler
You usually add new tasks to a scheduler using the add_interval_task or
add_daytime_task methods, with the appropriate processmethod argument
to select sequential, threaded or forked processing. NOTE: it is impossible
to add new tasks to a ForkedScheduler, after the scheduler has been started!
For more control you can use one of the following Task classes
and use schedule_task or schedule_task_abs:
IntervalTask ThreadedIntervalTask ForkedIntervalTask
SingleTask ThreadedSingleTask ForkedSingleTask
WeekdayTask ThreadedWeekdayTask ForkedWeekdayTask
MonthdayTask ThreadedMonthdayTask ForkedMonthdayTask
Kronos is the Greek God of Time.
Kronos scheduler (c) Irmen de Jong.
This version has been extracted from the Turbogears source repository
and slightly changed to be completely stand-alone again. Also some fixes
have been made to make it work on Python 2.6 (sched module changes).
The version in Turbogears is based on the original stand-alone Kronos.
This is open-source software, released under the MIT Software License:
http://www.opensource.org/licenses/mit-license.php
"""
__version__="2.0"
__all__ = [
"DayTaskRescheduler",
"ForkedIntervalTask",
"ForkedMonthdayTask",
"ForkedScheduler",
"ForkedSingleTask",
"ForkedTaskMixin",
"ForkedWeekdayTask",
"IntervalTask",
"MonthdayTask",
"Scheduler",
"SingleTask",
"Task",
"ThreadedIntervalTask",
"ThreadedMonthdayTask",
"ThreadedScheduler",
"ThreadedSingleTask",
"ThreadedTaskMixin",
"ThreadedWeekdayTask",
"WeekdayTask",
"add_interval_task",
"add_monthday_task",
"add_single_task",
"add_weekday_task",
"cancel",
"method",
]
import os
import sys
import sched
import time
import traceback
import weakref
class method:
sequential="sequential"
forked="forked"
threaded="threaded"
class Scheduler:
"""The Scheduler itself."""
def __init__(self):
self.running=True
self.sched = sched.scheduler(time.time, self.__delayfunc)
def __delayfunc(self, delay):
# This delay function is basically a time.sleep() that is
# divided up, so that we can check the self.running flag while delaying.
# there is an additional check in here to ensure that the top item of
# the queue hasn't changed
if delay<10:
time.sleep(delay)
else:
toptime = self._getqueuetoptime()
endtime = time.time() + delay
period = 5
stoptime = endtime - period
while self.running and stoptime > time.time() and \
self._getqueuetoptime() == toptime:
time.sleep(period)
if not self.running or self._getqueuetoptime() != toptime:
return
now = time.time()
if endtime > now:
time.sleep(endtime - now)
def _acquire_lock(self):
pass
def _release_lock(self):
pass
def add_interval_task(self, action, taskname, initialdelay, interval,
processmethod, args, kw):
"""Add a new Interval Task to the schedule.
A very short initialdelay, or one of zero, cannot be honored exactly;
you will see a slight delay before the task is first executed, because
the scheduler needs to pick it up in its loop.
"""
if initialdelay < 0 or interval < 1:
raise ValueError("Delay or interval must be >0")
# Select the correct IntervalTask class. Not all types may be available!
if processmethod == method.sequential:
TaskClass = IntervalTask
elif processmethod == method.threaded:
TaskClass = ThreadedIntervalTask
elif processmethod == method.forked:
TaskClass = ForkedIntervalTask
else:
raise ValueError("Invalid processmethod")
if not args:
args = []
if not kw:
kw = {}
task = TaskClass(taskname, interval, action, args, kw)
self.schedule_task(task, initialdelay)
return task
def add_single_task(self, action, taskname, initialdelay, processmethod,
args, kw):
"""Add a new task to the scheduler that will only be executed once."""
if initialdelay < 0:
raise ValueError("Delay must be >0")
# Select the correct SingleTask class. Not all types may be available!
if processmethod == method.sequential:
TaskClass = SingleTask
elif processmethod == method.threaded:
TaskClass = ThreadedSingleTask
elif processmethod == method.forked:
TaskClass = ForkedSingleTask
else:
raise ValueError("Invalid processmethod")
if not args:
args = []
if not kw:
kw = {}
task = TaskClass(taskname, action, args, kw)
self.schedule_task(task, initialdelay)
return task
def add_daytime_task(self, action, taskname, weekdays, monthdays, timeonday,
processmethod, args, kw):
"""Add a new Day Task (Weekday or Monthday) to the schedule."""
if weekdays and monthdays:
raise ValueError("You can only specify weekdays or monthdays, "
"not both")
if not args:
args = []
if not kw:
kw = {}
if weekdays:
# Select the correct WeekdayTask class.
# Not all types may be available!
if processmethod == method.sequential:
TaskClass = WeekdayTask
elif processmethod == method.threaded:
TaskClass = ThreadedWeekdayTask
elif processmethod == method.forked:
TaskClass = ForkedWeekdayTask
else:
raise ValueError("Invalid processmethod")
task=TaskClass(taskname, weekdays, timeonday, action, args, kw)
if monthdays:
# Select the correct MonthdayTask class.
# Not all types may be available!
if processmethod == method.sequential:
TaskClass = MonthdayTask
elif processmethod == method.threaded:
TaskClass = ThreadedMonthdayTask
elif processmethod == method.forked:
TaskClass = ForkedMonthdayTask
else:
raise ValueError("Invalid processmethod")
task=TaskClass(taskname, monthdays, timeonday, action, args, kw)
firsttime=task.get_schedule_time(True)
self.schedule_task_abs(task, firsttime)
return task
def schedule_task(self, task, delay):
"""Add a new task to the scheduler with the given delay (seconds).
Low-level method for internal use.
"""
if self.running:
# lock the sched queue, if needed
self._acquire_lock()
try:
task.event = self.sched.enter(delay, 0, task,
(weakref.ref(self),) )
finally:
self._release_lock()
else:
task.event = self.sched.enter(delay, 0, task,
(weakref.ref(self),) )
def schedule_task_abs(self, task, abstime):
"""Add a new task to the scheduler for the given absolute time value.
Low-level method for internal use.
"""
if self.running:
# lock the sched queue, if needed
self._acquire_lock()
try:
task.event = self.sched.enterabs(abstime, 0, task,
(weakref.ref(self),) )
finally:
self._release_lock()
else:
task.event = self.sched.enterabs(abstime, 0, task,
(weakref.ref(self),) )
def start(self):
"""Start the scheduler."""
self._run()
def stop(self):
"""Remove all pending tasks and stop the Scheduler."""
self.running = False
self._clearschedqueue()
def cancel(self, task):
"""Cancel given scheduled task."""
self.sched.cancel(task.event)
if sys.version_info>=(2,6):
# code for sched module of python 2.6+
def _getqueuetoptime(self):
return self.sched._queue[0].time
def _clearschedqueue(self):
self.sched._queue[:] = []
else:
# code for sched module of python 2.5 and older
def _getqueuetoptime(self):
return self.sched.queue[0][0]
def _clearschedqueue(self):
self.sched.queue[:] = []
def _run(self):
# Low-level run method to do the actual scheduling loop.
while self.running:
try:
self.sched.run()
except Exception,x:
print >>sys.stderr, "ERROR DURING SCHEDULER EXECUTION",x
print >>sys.stderr, "".join(
traceback.format_exception(*sys.exc_info()))
print >>sys.stderr, "-" * 20
# queue is empty; sleep a short while before checking again
if self.running:
time.sleep(5)
class Task:
"""Abstract base class of all scheduler tasks"""
def __init__(self, name, action, args, kw):
"""This is an abstract class!"""
self.name=name
self.action=action
self.args=args
self.kw=kw
def __call__(self, schedulerref):
"""Execute the task action in the scheduler's thread."""
try:
self.execute()
except Exception,x:
self.handle_exception(x)
self.reschedule(schedulerref())
def reschedule(self, scheduler):
"""This method should be defined in one of the sub classes!"""
raise NotImplementedError("You're using the abstract base class 'Task',"
" use a concrete class instead")
def execute(self):
"""Execute the actual task."""
self.action(*self.args, **self.kw)
def handle_exception(self, exc):
"""Handle any exception that occured during task execution."""
print >>sys.stderr, "ERROR DURING TASK EXECUTION", exc
print >>sys.stderr, "".join(traceback.format_exception(*sys.exc_info()))
print >>sys.stderr, "-" * 20
class SingleTask(Task):
"""A task that only runs once."""
def reschedule(self, scheduler):
pass
class IntervalTask(Task):
"""A repeated task that occurs at certain intervals (in seconds)."""
def __init__(self, name, interval, action, args=None, kw=None):
Task.__init__(self, name, action, args, kw)
self.interval = interval
def reschedule(self, scheduler):
"""Reschedule this task according to its interval (in seconds)."""
scheduler.schedule_task(self, self.interval)
class DayTaskRescheduler:
"""A mixin class that contains the reschedule logic for the DayTasks."""
def __init__(self, timeonday):
self.timeonday = timeonday
def get_schedule_time(self, today):
"""Calculate the time value at which this task is to be scheduled."""
now = list(time.localtime())
if today:
# schedule for today. let's see if that is still possible
if (now[3], now[4]) >= self.timeonday:
# too bad, it will be tomorrow
now[2] += 1
else:
# tomorrow
now[2] += 1
# set new time on day (hour,minute)
now[3], now[4] = self.timeonday
# seconds
now[5] = 0
return time.mktime(now)
def reschedule(self, scheduler):
"""Reschedule this task according to the daytime for the task.
The task is scheduled for tomorrow, for the given daytime.
"""
# (The execute method in the concrete Task classes will check
# if the current day is a day on which the task must run).
abstime = self.get_schedule_time(False)
scheduler.schedule_task_abs(self, abstime)
class WeekdayTask(DayTaskRescheduler, Task):
"""A task that is called at specific days in a week (1-7), at a fixed time
on the day.
"""
def __init__(self, name, weekdays, timeonday, action, args=None, kw=None):
if type(timeonday) not in (list, tuple) or len(timeonday) != 2:
raise TypeError("timeonday must be a 2-tuple (hour,minute)")
if type(weekdays) not in (list, tuple):
raise TypeError("weekdays must be a sequence of weekday numbers "
"1-7 (1 is Monday)")
DayTaskRescheduler.__init__(self, timeonday)
Task.__init__(self, name, action, args, kw)
self.days = weekdays
def execute(self):
# This is called every day, at the correct time. We only need to
# check if we should run this task today (this day of the week).
weekday = time.localtime().tm_wday + 1
if weekday in self.days:
self.action(*self.args, **self.kw)
class MonthdayTask(DayTaskRescheduler, Task):
"""A task that is called at specific days in a month (1-31), at a fixed
time on the day.
"""
def __init__(self, name, monthdays, timeonday, action, args=None, kw=None):
if type(timeonday) not in (list, tuple) or len(timeonday) != 2:
raise TypeError("timeonday must be a 2-tuple (hour,minute)")
if type(monthdays) not in (list, tuple):
raise TypeError("monthdays must be a sequence of monthdays numbers "
"1-31")
DayTaskRescheduler.__init__(self, timeonday)
Task.__init__(self, name, action, args, kw)
self.days = monthdays
def execute(self):
# This is called every day, at the correct time. We only need to
# check if we should run this task today (this day of the month).
if time.localtime().tm_mday in self.days:
self.action(*self.args, **self.kw)
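# --- usage sketch (illustration only, not part of the original module) ------
# Scheduling a repeated task as described in the module docstring. The plain
# Scheduler blocks in start(), so real applications typically pick one of the
# Threaded* variants defined below instead. The task itself is a dummy.
def _example_interval_task():
    def tick():
        print "tick", time.time()
    s = Scheduler()
    s.add_interval_task(tick, "tick task", initialdelay=1, interval=10,
                        processmethod=method.sequential, args=None, kw=None)
    return s    # call s.start() to enter the (blocking) scheduling loop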
try:
import threading
class ThreadedScheduler(Scheduler):
"""A Scheduler that runs in its own thread."""
def __init__(self):
Scheduler.__init__(self)
# we require a lock around the task queue
self._lock = threading.Lock()
def start(self):
"""Splice off a thread in which the scheduler will run."""
self.thread = threading.Thread(target=self._run)
self.thread.setDaemon(True)
self.thread.start()
def stop(self):
"""Stop the scheduler and wait for the thread to finish."""
Scheduler.stop(self)
try:
self.thread.join()
except AttributeError:
pass
def _acquire_lock(self):
"""Lock the thread's task queue."""
self._lock.acquire()
def _release_lock(self):
"""Release the lock on th ethread's task queue."""
self._lock.release()
class ThreadedTaskMixin:
"""A mixin class to make a Task execute in a separate thread."""
def __call__(self, schedulerref):
"""Execute the task action in its own thread."""
threading.Thread(target=self.threadedcall).start()
self.reschedule(schedulerref())
def threadedcall(self):
# This method is run within its own thread, so we have to
# do the execute() call and exception handling here.
try:
self.execute()
except Exception,x:
self.handle_exception(x)
class ThreadedIntervalTask(ThreadedTaskMixin, IntervalTask):
"""Interval Task that executes in its own thread."""
pass
class ThreadedSingleTask(ThreadedTaskMixin, SingleTask):
"""Single Task that executes in its own thread."""
pass
class ThreadedWeekdayTask(ThreadedTaskMixin, WeekdayTask):
"""Weekday Task that executes in its own thread."""
pass
class ThreadedMonthdayTask(ThreadedTaskMixin, MonthdayTask):
"""Monthday Task that executes in its own thread."""
pass
except ImportError:
# threading is not available
pass
if hasattr(os, "fork"):
import signal
class ForkedScheduler(Scheduler):
"""A Scheduler that runs in its own forked process."""
def __del__(self):
if hasattr(self, "childpid"):
os.kill(self.childpid, signal.SIGKILL)
def start(self):
"""Fork off a new process in which the scheduler will run."""
pid = os.fork()
if pid == 0:
# we are the child
signal.signal(signal.SIGUSR1, self.signalhandler)
self._run()
os._exit(0)
else:
# we are the parent
self.childpid = pid
# can no longer insert in the scheduler queue
del self.sched
def stop(self):
"""Stop the scheduler and wait for the process to finish."""
os.kill(self.childpid, signal.SIGUSR1)
os.waitpid(self.childpid, 0)
def signalhandler(self, sig, stack):
Scheduler.stop(self)
class ForkedTaskMixin:
"""A mixin class to make a Task execute in a separate process."""
def __call__(self, schedulerref):
"""Execute the task action in its own process."""
pid = os.fork()
if pid == 0:
# we are the child
try:
self.execute()
except Exception,x:
self.handle_exception(x)
os._exit(0)
else:
# we are the parent
self.reschedule(schedulerref())
class ForkedIntervalTask(ForkedTaskMixin, IntervalTask):
"""Interval Task that executes in its own process."""
pass
class ForkedSingleTask(ForkedTaskMixin, SingleTask):
"""Single Task that executes in its own process."""
pass
class ForkedWeekdayTask(ForkedTaskMixin, WeekdayTask):
"""Weekday Task that executes in its own process."""
pass
class ForkedMonthdayTask(ForkedTaskMixin, MonthdayTask):
"""Monthday Task that executes in its own process."""
pass
if __name__=="__main__":
def testaction(arg):
print ">>>TASK",arg,"sleeping 3 seconds"
time.sleep(3)
print "<<