lv2fil-2.0+20100312.git18130f5a+dfsg0/0000755000175000017500000000000011703032664016071 5ustar alessioalessiolv2fil-2.0+20100312.git18130f5a+dfsg0/AUTHORS0000644000175000017500000000012211346337452017142 0ustar alessioalessioNedko Arnaudov Fons Adriaensen lv2fil-2.0+20100312.git18130f5a+dfsg0/wscript0000644000175000017500000000205511346337452017517 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 # TODO: check these flags and how to add them to waf # LIBRARIES = -DPIC -Wall # CFLAGS := -g -fPIC -DPIC -Wall -Werror # the following two variables are used by the target "waf dist" VERSION='2.0' APPNAME='lv2fil' # these variables are mandatory ('/' are converted automatically) srcdir = '.' blddir = 'build' def set_options(opt): opt.parser.remove_option('--prefix') # prefix as commonly used concept has no use here, so we remove it to not add confusion opt.tool_options('compiler_cc') opt.tool_options('lv2plugin', tooldir='.') def configure(conf): conf.check_tool('compiler_cc') conf.check_tool('lv2plugin', tooldir='.') conf.check_pkg('lv2core', mandatory=True) conf.env.append_unique('LINKFLAGS', '-lm') def build(bld): filter = bld.create_obj('lv2plugin', type='cc') filter.uselib = 'LV2CORE' filter.target = 'filter' filter.ttl = ['filter.ttl', 'manifest.ttl', 'ui', 'lv2logo.png'] filter.source = ['filter.c', 'lv2filter.c', 'lv2plugin.c', 'log.c', 'lv2_ui.c'] lv2fil-2.0+20100312.git18130f5a+dfsg0/INSTALL0000644000175000017500000000133211346337452017127 0ustar alessioalessio= Compile-time requirements = * lv2core = Run-time requirements = * pygtk * pycairo = Configure it = ./waf configure By default waf will try to autodetect where to install the plugin by checking the LV2_PATH environment variable. If you don't have LV2_PATH set or if you want to override install location, you need to use the --lv2-dir switch. You have to specify full normalized path (i.e. /home/usesrname/xxx instead of ~/xxx). 
./waf configure --lv2-dir=/usr/lib/lv2 For full list of options, run: ./waf configure --help = Build it = ./waf You can use -j option to enable building on more than one CPU: ./waf -j 4 = Install it = ./waf install You probably want to run later as superuser to install system-wide lv2fil-2.0+20100312.git18130f5a+dfsg0/log.c0000644000175000017500000000230111346337452017020 0ustar alessioalessio/* -*- Mode: C ; c-basic-offset: 2 -*- */ /***************************************************************************** * * Copyright (C) 2006,2007,2008,2009 Nedko Arnaudov * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; version 2 of the License * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * *****************************************************************************/ #include #include #include #include #include "log.h" void lv2log(int level, const char * format, ...) 
{ va_list arglist; va_start(arglist, format); vprintf(format, arglist); va_end(arglist); } lv2fil-2.0+20100312.git18130f5a+dfsg0/ui0000755000175000017500000012045311346337452016447 0ustar alessioalessio#!/usr/bin/env python # # Copyright (C) 2008,2009 Nedko Arnaudov # Copyright (C) 2006 Leonard Ritter # Filter response code by Fons Adriaensen # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; version 2 of the License # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. import sys import os import fcntl import gtk import gobject import cairo from math import pi, sin, cos, atan2, log, sqrt, hypot, log10 from colorsys import hls_to_rgb, rgb_to_hls def map_coords_linear(x,y): return x,1.0-y def map_coords_spheric(x,y): nx = cos(x * 2 * pi) * y ny = -sin(x * 2 * pi) * y return nx, ny def get_peaks(f, tolerance=0.01, maxd=0.01, mapfunc=map_coords_linear): corners = 360 yc = 1.0/corners peaks = [] x0,y0 = 0.0,0.0 t0 = -9999.0 i0 = 0 for i in xrange(int(corners)): p = i*yc a = f(p) x,y = mapfunc(p, a) if i == 0: x0,y0 = x,y t = atan2((y0 - y), (x0 - x)) / (2*pi) td = t - t0 if (abs(td) >= tolerance): t0 = t peaks.append((x,y)) x0,y0 = x,y return peaks def make_knobshape(gaps, gapdepth): def knobshape_func(x): x = (x*gaps)%1.0 w = 0.5 g1 = 0.5 - w*0.5 g2 = 0.5 + w*0.5 if (x >= g1) and (x < 0.5): x = (x-g1)/(w*0.5) return 0.5 - gapdepth * x * 0.9 elif (x >= 0.5) and (x < g2): x = (x-0.5)/(w*0.5) return 0.5 - gapdepth * (1-x) * 0.9 else: return 0.5 return 
get_peaks(knobshape_func, 0.03, 0.05, map_coords_spheric) def hls_to_color(h,l,s): r,g,b = hls_to_rgb(h,l,s) return gtk.gdk.color_parse('#%04X%04X%04X' % (int(r*65535),int(g*65535),int(b*65535))) def color_to_hls(color): string = color.to_string() r = int(string[1:5], 16) / 65535.0 g = int(string[5:9], 16) / 65535.0 b = int(string[9:13], 16) / 65535.0 return rgb_to_hls(r, g, b) MARKER_NONE = '' MARKER_LINE = 'line' MARKER_ARROW = 'arrow' MARKER_DOT = 'dot' LEGEND_NONE = '' LEGEND_DOTS = 'dots' # painted dots LEGEND_LINES = 'lines' # painted ray-like lines LEGEND_RULER = 'ruler' # painted ray-like lines + a circular one LEGEND_RULER_INWARDS = 'ruler-inwards' # same as ruler, but the circle is on the outside LEGEND_LED_SCALE = 'led-scale' # an LCD scale LEGEND_LED_DOTS = 'led-dots' # leds around the knob class KnobTooltip: def __init__(self): self.tooltip_window = gtk.Window(gtk.WINDOW_POPUP) self.tooltip = gtk.Label() #self.tooltip.modify_fg(gtk.STATE_NORMAL, hls_to_color(0.0, 1.0, 0.0)) self.tooltip_timeout = None vbox = gtk.VBox() vbox2 = gtk.VBox() vbox2.add(self.tooltip) vbox2.set_border_width(2) vbox.add(vbox2) self.tooltip_window.add(vbox) vbox.connect('expose-event', self.on_tooltip_expose) def show_tooltip(self, knob): text = knob.format_value() rc = knob.get_allocation() x,y = knob.window.get_origin() self.tooltip_window.show_all() w,h = self.tooltip_window.get_size() wx,wy = x+rc.x-w, y+rc.y+rc.height/2-h/2 self.tooltip_window.move(wx,wy) rc = self.tooltip_window.get_allocation() self.tooltip_window.window.invalidate_rect((0,0,rc.width,rc.height), False) self.tooltip.set_text(text) if self.tooltip_timeout: gobject.source_remove(self.tooltip_timeout) self.tooltip_timeout = gobject.timeout_add(500, self.hide_tooltip) def hide_tooltip(self): self.tooltip_window.hide_all() def on_tooltip_expose(self, widget, event): ctx = widget.window.cairo_create() rc = widget.get_allocation() #ctx.set_source_rgb(*hls_to_rgb(0.0, 0.0, 0.5)) #ctx.paint() 
ctx.set_source_rgb(*hls_to_rgb(0.0, 0.0, 0.5)) ctx.translate(0.5, 0.5) ctx.set_line_width(1) ctx.rectangle(rc.x, rc.y,rc.width-1,rc.height-1) ctx.stroke() return False knob_tooltip = None def get_knob_tooltip(): global knob_tooltip if not knob_tooltip: knob_tooltip = KnobTooltip() return knob_tooltip class SmartAdjustment(gtk.Adjustment): def __init__(self, log=False, value=0, lower=0, upper=0, step_incr=0, page_incr=0, page_size=0): self.log = log gtk.Adjustment.__init__(self, value, lower, upper, step_incr, page_incr, page_size) self.normalized_value = self.real2norm(self.value) def real2norm(self, value): if self.log: return log(value / self.lower, self.upper / self.lower) else: return (value - self.lower) / (self.upper - self.lower) def norm2real(self, value): if self.log: return self.lower * pow(self.upper / self.lower, value) else: return value * (self.upper - self.lower) + self.lower def set_value(self, value): self.normalized_value = self.real2norm(value) gtk.Adjustment.set_value(self, value) def get_normalized_value(self): return self.normalized_value def set_normalized_value(self, value): self.normalized_value = value if self.normalized_value < 0.0: self.normalized_value = 0.0 elif self.normalized_value > 1.0: self.normalized_value = 1.0 self.set_value(self.norm2real(self.normalized_value)) class Knob(gtk.VBox): def __init__(self): gtk.VBox.__init__(self) self.gapdepth = 4 self.gaps = 10 self.value = 0.0 self.min_value = 0.0 self.max_value = 127.0 self.fg_hls = 0.0, 0.7, 0.0 self.legend_hls = None self.dragging = False self.start = 0.0 self.digits = 2 self.segments = 13 self.label = '' self.marker = MARKER_LINE self.angle = (3.0/4.0) * 2 * pi self.knobshape = None self.legend = LEGEND_DOTS self.lsize = 2 self.lscale = False self.set_double_buffered(True) self.connect('realize', self.on_realize) self.connect("size_allocate", self.on_size_allocate) self.connect('expose-event', self.on_expose) self.set_border_width(6) self.set_size_request(50, 50) 
self.tooltip_enabled = False self.adj = None def set_adjustment(self, adj): self.min_value = 0.0 self.max_value = 1.0 self.value = adj.get_normalized_value() if self.adj: self.adj.disconnect(self.adj_id) self.adj = adj self.adj_id = adj.connect("value-changed", self.on_adj_value_changed) def is_sensitive(self): return self.get_property("sensitive") def format_value(self): if self.adj: value = self.adj.value else: value = self.value return ("%%.%if" % self.digits) % value def show_tooltip(self): if self.tooltip_enabled: get_knob_tooltip().show_tooltip(self) def on_realize(self, widget): self.root = self.get_toplevel() self.root.add_events(gtk.gdk.ALL_EVENTS_MASK) self.root.connect('scroll-event', self.on_mousewheel) self.root.connect('button-press-event', self.on_left_down) self.root.connect('button-release-event', self.on_left_up) self.root.connect('motion-notify-event', self.on_motion) self.update_knobshape() def update_knobshape(self): rc = self.get_allocation() b = self.get_border_width() size = min(rc.width, rc.height) - 2*b gd = float(self.gapdepth*0.5) / size self.gd = gd self.knobshape = make_knobshape(self.gaps, gd) def set_legend_scale(self, scale): self.lscale = scale self.refresh() def set_legend_line_width(self, width): self.lsize = width self.refresh() def set_segments(self, segments): self.segments = segments self.refresh() def set_marker(self, marker): self.marker = marker self.refresh() def set_range(self, minvalue, maxvalue): self.min_value = minvalue self.max_value = maxvalue self.set_value(self.value) def quantize_value(self, value): scaler = 10**self.digits value = int((value*scaler)+0.5) / float(scaler) return value def on_adj_value_changed(self, adj): new_value = adj.get_normalized_value() if self.value != new_value: self.value = new_value self.refresh() def set_value(self, value): oldval = self.value self.value = min(max(self.quantize_value(value), self.min_value), self.max_value) if self.value != oldval: if self.adj: 
self.adj.set_normalized_value(value) self.refresh() def get_value(self): return self.value def set_top_color(self, h, l, s): self.fg_hls = h,l,s self.refresh() def set_legend_color(self, h, l, s): self.legend_hls = h,l,s self.refresh() def get_top_color(self): return self.fg_hls def set_gaps(self, gaps): self.gaps = gaps self.knobshape = None self.refresh() def get_gaps(self): return self.gaps def set_gap_depth(self, gapdepth): self.gapdepth = gapdepth self.knobshape = None self.refresh() def get_gap_depth(self): return self.gapdepth def set_angle(self, angle): self.angle = angle self.refresh() def get_angle(self): return self.angle def set_legend(self, legend): self.legend = legend self.refresh() def get_legend(self): return self.legend def on_left_down(self, widget, event): #print "on_left_down" # dont drag insensitive widgets if not self.is_sensitive(): return False if not sum(self.get_allocation().intersect((int(event.x), int(event.y), 1, 1))): return False if event.button == 1: #print "start draggin" self.startvalue = self.value self.start = event.y self.dragging = True self.show_tooltip() self.grab_add() return True return False def on_left_up(self, widget, event): #print "on_left_up" if not self.dragging: return False if event.button == 1: #print "stop draggin" self.dragging = False self.grab_remove() return True return False def on_motion(self, widget, event): #print "on_motion" # dont drag insensitive widgets if not self.is_sensitive(): return False if self.dragging: x,y,state = self.window.get_pointer() rc = self.get_allocation() range = self.max_value - self.min_value scale = rc.height if event.state & gtk.gdk.SHIFT_MASK: scale = rc.height*8 value = self.startvalue - ((y - self.start)*range)/scale oldval = self.value self.set_value(value) self.show_tooltip() if oldval != self.value: self.start = y self.startvalue = self.value return True return False def on_mousewheel(self, widget, event): # dont move insensitive widgets if not self.is_sensitive(): 
return False if not sum(self.get_allocation().intersect((int(event.x), int(event.y), 1, 1))): return range = self.max_value - self.min_value minstep = 1.0 / (10**self.digits) if event.state & (gtk.gdk.SHIFT_MASK | gtk.gdk.BUTTON1_MASK): step = minstep else: step = max(self.quantize_value(range/25.0), minstep) value = self.value if event.direction == gtk.gdk.SCROLL_UP: value += step elif event.direction == gtk.gdk.SCROLL_DOWN: value -= step self.set_value(value) self.show_tooltip() def on_size_allocate(self, widget, allocation): #print allocation.x, allocation.y, allocation.width, allocation.height self.update_knobshape() def draw_points(self, ctx, peaks): ctx.move_to(*peaks[0]) for peak in peaks[1:]: ctx.line_to(*peak) def draw(self, ctx): if not self.legend_hls: self.legend_hls = color_to_hls(self.style.fg[gtk.STATE_NORMAL]) if not self.knobshape: self.update_knobshape() startangle = pi*1.5 - self.angle*0.5 angle = ((self.value - self.min_value) / (self.max_value - self.min_value)) * self.angle + startangle rc = self.get_allocation() size = min(rc.width, rc.height) kh = self.get_border_width() # knob height ps = 1.0/size # pixel size ps2 = 1.0 / (size-(2*kh)-1) # pixel size inside knob ss = ps * kh # shadow size lsize = ps2 * self.lsize # legend line width # draw spherical ctx.translate(rc.x, rc.y) ctx.translate(0.5,0.5) ctx.translate(size*0.5, size*0.5) ctx.scale(size-(2*kh)-1, size-(2*kh)-1) if self.legend == LEGEND_DOTS: ctx.save() ctx.set_source_rgb(*hls_to_rgb(*self.legend_hls)) dots = self.segments for i in xrange(dots): s = float(i)/(dots-1) a = startangle + self.angle*s ctx.save() ctx.rotate(a) r = lsize*0.5 if self.lscale: r = max(r*s,ps2) ctx.arc(0.5+lsize, 0.0, r, 0.0, 2*pi) ctx.fill() ctx.restore() ctx.restore() elif self.legend in (LEGEND_LINES, LEGEND_RULER, LEGEND_RULER_INWARDS): ctx.save() ctx.set_source_rgb(*hls_to_rgb(*self.legend_hls)) dots = self.segments n = ps2*(kh-1) for i in xrange(dots): s = float(i)/(dots-1) a = startangle + self.angle*s 
ctx.save() ctx.rotate(a) r = n*0.9 if self.lscale: r = max(r*s,ps2) ctx.move_to(0.5+ps2+n*0.1, 0.0) ctx.line_to(0.5+ps2+n*0.1+r, 0.0) ctx.set_line_width(lsize) ctx.stroke() ctx.restore() ctx.restore() if self.legend == LEGEND_RULER: ctx.save() ctx.set_source_rgb(*hls_to_rgb(*self.legend_hls)) ctx.set_line_width(lsize) ctx.arc(0.0, 0.0, 0.5+ps2+n*0.1, startangle, startangle+self.angle) ctx.stroke() ctx.restore() elif self.legend == LEGEND_RULER_INWARDS: ctx.save() ctx.set_source_rgb(*hls_to_rgb(*self.legend_hls)) ctx.set_line_width(lsize) ctx.arc(0.0, 0.0, 0.5+ps2+n, startangle, startangle+self.angle) ctx.stroke() # draw shadow only for sensitive widgets that have height if self.is_sensitive() and kh: ctx.save() ctx.translate(ss, ss) ctx.rotate(angle) self.draw_points(ctx, self.knobshape) ctx.close_path() ctx.restore() ctx.set_source_rgba(0,0,0,0.3) ctx.fill() if self.legend in (LEGEND_LED_SCALE, LEGEND_LED_DOTS): ch,cl,cs = self.legend_hls n = ps2*(kh-1) ctx.save() ctx.set_line_cap(cairo.LINE_CAP_ROUND) ctx.set_source_rgb(*hls_to_rgb(ch,cl*0.2,cs)) ctx.set_line_width(lsize) ctx.arc(0.0, 0.0, 0.5+ps2+n*0.5, startangle, startangle+self.angle) ctx.stroke() ctx.set_source_rgb(*hls_to_rgb(ch,cl,cs)) if self.legend == LEGEND_LED_SCALE: ctx.set_line_width(lsize-ps2*2) ctx.arc(0.0, 0.0, 0.5+ps2+n*0.5, startangle, angle) ctx.stroke() elif self.legend == LEGEND_LED_DOTS: dots = self.segments dsize = lsize-ps2*2 seg = self.angle/dots endangle = startangle + self.angle for i in xrange(dots): s = float(i)/(dots-1) a = startangle + self.angle*s if ((a-seg*0.5) > angle) or (angle == startangle): break ctx.save() ctx.rotate(a) r = dsize*0.5 if self.lscale: r = max(r*s,ps2) ctx.arc(0.5+ps2+n*0.5, 0.0, r, 0.0, 2*pi) ctx.fill() ctx.restore() ctx.restore() pat = cairo.LinearGradient(-0.5, -0.5, 0.5, 0.5) pat.add_color_stop_rgb(1.0, 0.2,0.2,0.2) pat.add_color_stop_rgb(0.0, 0.3,0.3,0.3) ctx.set_source(pat) ctx.rotate(angle) self.draw_points(ctx, self.knobshape) ctx.close_path() 
ctx.fill_preserve() ctx.set_source_rgba(0.1,0.1,0.1,1) ctx.save() ctx.identity_matrix() ctx.set_line_width(1.0) ctx.stroke() ctx.restore() ctx.arc(0.0, 0.0, 0.5-self.gd, 0.0, pi*2.0) ctx.set_source_rgb(*hls_to_rgb(self.fg_hls[0], max(self.fg_hls[1]*0.4,0.0), self.fg_hls[2])) ctx.fill() ctx.arc(0.0, 0.0, 0.5-self.gd-ps, 0.0, pi*2.0) ctx.set_source_rgb(*hls_to_rgb(self.fg_hls[0], min(self.fg_hls[1]*1.2,1.0), self.fg_hls[2])) ctx.fill() ctx.arc(0.0, 0.0, 0.5-self.gd-(2*ps), 0.0, pi*2.0) ctx.set_source_rgb(*hls_to_rgb(*self.fg_hls)) ctx.fill() # dont draw cap for insensitive widgets if not self.is_sensitive(): return #~ ctx.set_line_cap(cairo.LINE_CAP_ROUND) #~ ctx.move_to(0.5-0.3-self.gd-ps, 0.0) #~ ctx.line_to(0.5-self.gd-ps*5, 0.0) if self.marker == MARKER_LINE: ctx.set_line_cap(cairo.LINE_CAP_BUTT) ctx.move_to(0.5-0.3-self.gd-ps, 0.0) ctx.line_to(0.5-self.gd-ps, 0.0) ctx.save() ctx.identity_matrix() ctx.translate(0.5,0.5) ctx.set_line_width(5) ctx.set_source_rgb(*hls_to_rgb(self.fg_hls[0], min(self.fg_hls[1]*1.2,1.0), self.fg_hls[2])) ctx.stroke_preserve() ctx.set_line_width(3) ctx.set_source_rgb(*hls_to_rgb(self.fg_hls[0], max(self.fg_hls[1]*0.4,0.0), self.fg_hls[2])) ctx.stroke() ctx.restore() elif self.marker == MARKER_DOT: ctx.arc(0.5-0.05-self.gd-ps*5, 0.0, 0.05, 0.0, 2*pi) ctx.save() ctx.identity_matrix() ctx.set_source_rgb(*hls_to_rgb(self.fg_hls[0], min(self.fg_hls[1]*1.2,1.0), self.fg_hls[2])) ctx.stroke_preserve() ctx.set_line_width(1) ctx.set_source_rgb(*hls_to_rgb(self.fg_hls[0], max(self.fg_hls[1]*0.4,0.0), self.fg_hls[2])) ctx.fill() ctx.restore() elif self.marker == MARKER_ARROW: ctx.set_line_cap(cairo.LINE_CAP_BUTT) ctx.move_to(0.5-0.3-self.gd-ps, 0.1) ctx.line_to(0.5-0.1-self.gd-ps, 0.0) ctx.line_to(0.5-0.3-self.gd-ps, -0.1) ctx.close_path() ctx.save() ctx.identity_matrix() #~ ctx.set_source_rgb(*hls_to_rgb(self.fg_hls[0], min(self.fg_hls[1]*1.2,1.0), self.fg_hls[2])) #~ ctx.stroke_preserve() ctx.set_line_width(1) 
ctx.set_source_rgb(*hls_to_rgb(self.fg_hls[0], max(self.fg_hls[1]*0.4,0.0), self.fg_hls[2])) ctx.fill() ctx.restore() def refresh(self): rect = self.get_allocation() if self.window: self.window.invalidate_rect(rect, False) return True def on_expose(self, widget, event): self.context = self.window.cairo_create() self.draw(self.context) return False class filter_band: def __init__(self): self.fsamp = 48e3 def set_params(self, freq, bandw, gain): freq_ratio = freq / self.fsamp gain2 = pow(10.0, 0.05 * gain) b = 7 * bandw * freq_ratio / sqrt(gain2) self.gn = 0.5 * (gain2 - 1) self.v1 = -cos(2 * pi * freq_ratio) self.v2 = (1 - b) / (1 + b) self.v1 *= (1 + self.v2) self.gn *= (1 - self.v2) def get_response(self, freq): w = 2 * pi * (freq / self.fsamp) c1 = cos(w) s1 = sin(w) c2 = cos(2 * w) s2 = sin(2 * w) x = c2 + self.v1 * c1 + self.v2 y = s2 + self.v1 * s1 t1 = hypot(x, y) x += self.gn * (c2 - 1) y += self.gn * s2 t2 = hypot(x, y) #return t2 / t1 return 20 * log10(t2 / t1) class frequency_response(gtk.DrawingArea): def __init__(self): gtk.DrawingArea.__init__(self) self.connect("expose-event", self.on_expose) self.connect("size-request", self.on_size_request) self.connect("size_allocate", self.on_size_allocate) self.color_bg = gtk.gdk.Color(0,0,0) self.color_value = gtk.gdk.Color(int(65535 * 0.8), int(65535 * 0.7), 0) self.color_mark = gtk.gdk.Color(int(65535 * 0.3), int(65535 * 0.3), int(65535 * 0.3)) self.color_sum = gtk.gdk.Color(int(65535 * 1.0), int(65535 * 1.0), int(65535 * 1.0)) self.width = 0 self.height = 0 self.margin = 10 self.db_range = 30 self.master_gain = 0.0 self.master_enabled = False self.filters = {} def on_expose(self, widget, event): cairo_ctx = widget.window.cairo_create() # set a clip region for the expose event cairo_ctx.rectangle(event.area.x, event.area.y, event.area.width, event.area.height) cairo_ctx.clip() self.draw(cairo_ctx) return False def on_size_allocate(self, widget, allocation): #print allocation.x, allocation.y, allocation.width, 
allocation.height self.width = float(allocation.width) self.height = float(allocation.height) self.font_size = 10 def on_size_request(self, widget, requisition): #print "size-request, %u x %u" % (requisition.width, requisition.height) requisition.width = 150 requisition.height = 150 return def invalidate_all(self): self.queue_draw_area(0, 0, int(self.width), int(self.height)) def get_x(self, hz): width = self.width - 3.5 * self.margin #x = self.margin + width * (hz - 20) / (20000 - 20) x = 2.5 * self.margin + width * log(hz / 20.0, 1000.0) #print x return x def get_freq(self, x): width = self.width - 3.5 * self.margin return 20 * pow(1000, (x - 2.5 * self.margin) / width) def get_y(self, db): height = self.height - 2.5 * self.margin y = self.margin + height * (self.db_range - db) / (self.db_range * 2) #print y return y def draw_db_grid(self, cairo_ctx, db): x = self.get_x(20) y = self.get_y(db) cairo_ctx.move_to(x, y) cairo_ctx.line_to(self.get_x(20000), y) if db % 10 == 0: x -= 20 y += 3 cairo_ctx.move_to(x, y) label = "%+d" % db if db == 0: label = " " + label cairo_ctx.show_text(label) cairo_ctx.stroke() def invalidate_all(self): self.queue_draw_area(0, 0, int(self.width), int(self.height)) def draw(self, cairo_ctx): cairo_ctx.select_font_face("Fixed") cairo_ctx.set_source_color(self.color_bg) cairo_ctx.rectangle(0, 0, self.width, self.height) cairo_ctx.fill() cairo_ctx.set_source_color(self.color_mark) cairo_ctx.set_line_width(1); for hz in range(20, 101, 10) + range(100, 1001, 100) + range(1000, 10001, 1000) + range(10000, 20001, 10000): if hz >= 10000: label = "%dk" % int(hz / 1000) elif hz >= 1000: label = "%dk" % int(hz / 1000) else: label = "%d" % int(hz) first_digit = int(label[0]) if first_digit > 5 or (first_digit > 3 and (len(label) == 3)): label = None x = self.get_x(hz) cairo_ctx.move_to(x, self.get_y(self.db_range)) y = self.get_y(-self.db_range) cairo_ctx.line_to(x, y) if label: y += 10 if hz == 20000: x -= 15 elif hz != 20: x -= 3 
cairo_ctx.move_to(x, y) cairo_ctx.show_text(label) cairo_ctx.stroke() for db in range(0, self.db_range + 1, 5): self.draw_db_grid(cairo_ctx, db) if db != 0: self.draw_db_grid(cairo_ctx, -db) curves = [[x, {}, self.master_gain, self.get_freq(x)] for x in range(int(self.get_x(20)), int(self.get_x(20e3)))] #print repr(curves) # calculate filter responses for label, filter in self.filters.items(): if not filter.enabled: continue for point in curves: db = filter.get_response(point[3]) point[1][label] = [self.get_y(db), db] # calculate sum curve for point in curves: for label, filter_point in point[1].items(): point[2] += filter_point[1] #print point # draw filter curves for label, filter in self.filters.items(): if not filter.enabled: continue cairo_ctx.set_source_color(filter.color) cairo_ctx.move_to(curves[0][0], curves[0][1][label][0]) for point in curves: cairo_ctx.line_to(point[0], point[1][label][0]) cairo_ctx.stroke() if self.master_enabled: # draw sum curve cairo_ctx.set_source_color(self.color_sum) cairo_ctx.set_line_width(2); cairo_ctx.move_to(curves[0][0], self.get_y(curves[0][2])) for point in curves: cairo_ctx.line_to(point[0], self.get_y(point[2])) cairo_ctx.stroke() # draw base point markers for label, filter in self.filters.items(): if not filter.enabled: continue cairo_ctx.set_source_color(self.color_value) x = self.get_x(filter.adj_hz.value) y = self.get_y(filter.adj_db.value) cairo_ctx.move_to(x, y) cairo_ctx.show_text(label) cairo_ctx.stroke() def add_filter(self, label, adj_hz, adj_db, adj_bw, color): #print "filter %s added (%.2f Hz, %.2f dB, %.2f bw)" % (label, adj_hz.value, adj_db.value, adj_bw.value) filter = filter_band() filter.enabled = False filter.label = label filter.color = color filter.set_params(adj_hz.value, adj_bw.value, adj_db.value) adj_hz.filter = filter adj_db.filter = filter adj_bw.filter = filter filter.adj_hz = adj_hz filter.adj_db = adj_db filter.adj_bw = adj_bw adj_hz.connect("value-changed", self.on_value_change_request) 
adj_db.connect("value-changed", self.on_value_change_request) adj_bw.connect("value-changed", self.on_value_change_request) self.filters[label] = filter def enable_filter(self, label): filter = self.filters[label] #print "filter %s enabled (%.2f Hz, %.2f dB, %.2f bw)" % (label, filter.adj_hz.value, filter.adj_db.value, filter.adj_bw.value) filter.enabled = True self.invalidate_all() def disable_filter(self, label): filter = self.filters[label] #print "filter %s disabled (%.2f Hz, %.2f dB, %.2f bw)" % (label, filter.adj_hz.value, filter.adj_db.value, filter.adj_bw.value) filter.enabled = False self.invalidate_all() def on_value_change_request(self, adj): #print "adj changed" adj.filter.set_params(adj.filter.adj_hz.value, adj.filter.adj_bw.value, adj.filter.adj_db.value) self.invalidate_all() def master_enable(self): self.master_enabled = True; self.invalidate_all() def master_disable(self): self.master_enabled = False; self.invalidate_all() def set_master_gain(self, gain): self.master_gain = gain; self.invalidate_all() class filter_ui: def __init__(self, argv): self.fake = len(argv) == 1 if self.fake: self.plugin_uri = self.human_id = "fake" self.bundle_path = "." 
self.shown = False else: #print repr(argv) self.plugin_uri = argv[1] self.bundle_path = argv[2] self.human_id = argv[3] self.recv_pipe_fd = int(argv[4]) self.send_pipe_fd = int(argv[5]) oldflags = fcntl.fcntl(self.recv_pipe_fd, fcntl.F_GETFL) fcntl.fcntl(self.recv_pipe_fd, fcntl.F_SETFL, oldflags | os.O_NONBLOCK) self.recv_pipe = os.fdopen(self.recv_pipe_fd, 'r') self.send_pipe = os.fdopen(self.send_pipe_fd, 'w') if self.plugin_uri == "http://nedko.aranaudov.org/soft/filter/2/mono": self.port_base = 2 elif self.plugin_uri == "http://nedko.aranaudov.org/soft/filter/2/stereo": self.port_base = 4 elif self.plugin_uri == "fake": self.port_base = 0 else: return self.lv2logo = gtk.gdk.pixbuf_new_from_file(self.bundle_path + "/lv2logo.png") self.window = gtk.Window(gtk.WINDOW_TOPLEVEL) #self.window.set_size_request(600, 400) self.window.set_title("%s (4-band parametric filter)" % self.human_id) self.window.set_role("plugin_ui") self.top_vbox = gtk.VBox() self.top_vbox.set_spacing(10) align = gtk.Alignment(0.5, 0.5, 1.0, 1.0) align.set_padding(10, 10, 10, 10) align.add(self.top_vbox) self.window.add(align) self.fr = frequency_response() self.fr.set_size_request(400, 200) frame = gtk.Frame() frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT) frame.add(self.fr) self.top_vbox.pack_start(frame, True, True) self.param_hbox = gtk.HBox() self.top_vbox.pack_start(self.param_hbox) self.param_hbox.set_spacing(10) self.initator = False self.ports = [] misc_box = gtk.VBox() misc_box.set_spacing(5) master_frame = gtk.Frame("Master") master_frame.set_label_align(0.5, 0.5) master_box = gtk.VBox() master_box.set_spacing(5) port = {'index': 0, 'name': 'Active', 'type': 'toggle'} self.ports.append(port) self.add_param_box(master_box, self.create_toggle_box(port)) port = {'index': 1, 'name': 'Gain', 'type': 'knob', 'min': -20.0, 'max': 20.0, 'unit': 'dB', 'log': False} self.ports.append(port) self.add_param_box(master_box, self.create_knob_box(port)) master_frame.add(master_box) 
misc_box.pack_start(master_frame, False, False) #logo = gtk.Image() #logo.set_from_pixbuf(self.lv2logo) #misc_box.pack_start(logo, True, True) button_box = gtk.VBox() button = gtk.Button(stock=gtk.STOCK_ABOUT) button.connect("clicked", self.on_about) button_box.pack_start(button) button = gtk.Button(stock=gtk.STOCK_CLOSE) button.connect("clicked", self.on_window_closed) button_box.pack_start(button) align = gtk.Alignment(0.5, 1.0, 1.0, 0.0) align.add(button_box) misc_box.pack_start(align, True, True) band_parameters = [ {'name': 'Active', 'type': 'toggle'}, {'name': 'Frequency', 'type': 'knob', 'unit': 'Hz', 'log': True}, {'name': 'Bandwidth', 'type': 'knob', 'min': 0.125, 'max': 8.0, 'unit': '', 'log': True}, {'name': 'Gain', 'type': 'knob', 'min': -20.0, 'max': 20.0, 'unit': 'dB', 'log': False}] freq_min = [ 20.0, 40.0, 100.0, 200.0] freq_max = [2000.0, 4000.0, 10000.0, 20000.0] port_index = 2 filter_colors = [gtk.gdk.Color(int(65535 * 1.0), int(65535 * 0.6), int(65535 * 0.0)), gtk.gdk.Color(int(65535 * 0.6), int(65535 * 1.0), int(65535 * 0.6)), gtk.gdk.Color(int(65535 * 0.0), int(65535 * 0.6), int(65535 * 1.0)), gtk.gdk.Color(int(65535 * 0.9), int(65535 * 0.0), int(65535 * 0.5))] for i in [0, 1, 2, 3]: band_frame = gtk.Frame("Band %d" % (i + 1)) band_frame.set_label_align(0.5, 0.5) band_box = gtk.VBox() band_box.set_spacing(5) for parameter in band_parameters: port = parameter.copy() port['index'] = port_index port_index += 1 if port['name'] == 'Frequency': port['min'] = freq_min[i] port['max'] = freq_max[i] self.ports.append(port) #param_box.set_spacing(5) if port['type'] == 'knob': self.add_param_box(band_box, self.create_knob_box(port)) elif port['type'] == 'toggle': self.add_param_box(band_box, self.create_toggle_box(port)) self.fr.add_filter( str(i + 1), self.ports[port_index - 3]['adj'], # frequency self.ports[port_index - 1]['adj'], # gain self.ports[port_index - 2]['adj'], # bandwidth filter_colors[i]) band_frame.add(band_box) 
self.param_hbox.pack_start(band_frame, True, True) self.param_hbox.pack_start(misc_box, True, True) self.initator = True def on_about(self, widget): about = gtk.AboutDialog() about.set_transient_for(self.window) about.set_name("4-band parametric filter") #about.set_website(program_data['website']) about.set_authors(["Nedko Arnaudov - LV2 plugin and GUI", 'Fons Adriaensen - DSP code']) about.set_artists(["LV2 logo has been designed by Thorsten Wilms, based on a concept from Peter Shorthose."]) about.set_logo(self.lv2logo) about.show() about.run() about.hide() def create_knob_box(self, port): param_box = gtk.VBox() step = (port['max'] - port['min']) / 100 adj = SmartAdjustment(port['log'], port['min'], port['min'], port['max'], step, step * 20) adj.port = port port['adj'] = adj adj.connect("value-changed", self.on_adj_value_changed) knob = Knob() knob.set_adjustment(adj) align = gtk.Alignment(0.5, 0.5, 0, 0) align.set_padding(0, 0, 20, 20) align.add(knob) param_box.pack_start(align, False) adj.label = gtk.Label(self.get_adj_value_text(adj)[0]) param_box.pack_start(adj.label, False) #spin = gtk.SpinButton(adj, 0.0, 2) #param_box.pack_start(spin, False) label = gtk.Label(port['name']) param_box.pack_start(label, False) return param_box def create_toggle_box(self, port): param_box = gtk.VBox() button = gtk.CheckButton(port['name']) button.port = port port['widget'] = button button.connect("toggled", self.on_button_toggled) align = gtk.Alignment(0.5, 0.5, 0, 0) align.add(button) param_box.pack_start(align, False) return param_box def add_param_box(self, container, param_box): align = gtk.Alignment(0.5, 0.5, 1.0, 1.0) align.set_padding(10, 10, 10, 10) align.add(param_box) container.pack_start(align, True) def get_adj_value_text(self, adj): value = adj.get_value() if value >= 10000: format = "%.0f" elif value >= 1000: format = "%.1f" else: format = "%.2f" text = format % value unit = adj.port['unit'] if unit: text += " " + unit return value, text def 
on_adj_value_changed(self, adj): value, text = self.get_adj_value_text(adj) adj.label.set_text(text) if adj.port['index'] == 1: #print "Master gain = %.2f dB" % adj.get_value() self.fr.set_master_gain(adj.get_value()) if self.initator: #print adj.port, adj.get_value() self.send_port_value(adj.port['index'] + self.port_base, value) def on_button_toggled(self, widget): port_index = widget.port['index'] band_no = (port_index - 2) / 4 + 1 if widget.get_active(): value = 1.0 if band_no > 0: self.fr.enable_filter(str(band_no)) else: self.fr.master_enable() else: value = 0.0 if band_no > 0: self.fr.disable_filter(str(band_no)) else: self.fr.master_disable() if self.initator: self.send_port_value(port_index + self.port_base, value) def send(self, lines): if self.fake: return for line in lines: #print 'send: "' + line + '"' self.send_pipe.write(line + "\n") self.send_pipe.flush() def send_exiting(self): self.send(["exiting"]) def send_port_value(self, port_index, value): self.send(["port_value", str(port_index), "%.10f" % value]) def send_hi(self): self.send([""]) # send empty line (just newline char) def recv_line(self): return self.recv_pipe.readline().strip() def recv_command(self): try: msg = self.recv_line() if msg == "port_value": port_index = int(self.recv_line()) port_value = float(self.recv_line()) #print "port_value_change recevied: %d %f" % (port_index, port_value) self.on_port_value_changed(port_index, port_value) elif msg == "show": self.on_show() elif msg == "hide": self.on_hide() elif msg == "quit": self.on_quit() else: print 'unknown message: "' + msg + '"' return True except IOError: return False def on_recv(self, fd, cond): #print "on_recv" if cond == gobject.IO_HUP: gtk.main_quit() return False while True: if not self.recv_command(): break return True def run(self): self.window.connect("destroy", self.on_window_closed) if self.fake: if not self.shown: self.shown = True self.on_show() else: self.send_hi() gobject.io_add_watch(self.recv_pipe_fd, 
gobject.IO_IN | gobject.IO_HUP, self.on_recv) gtk.main() def on_port_value_changed(self, port_index, port_value): #print "port %d set to %f" % (port_index, port_value) port_index -= self.port_base port = self.ports[port_index] #print repr(port) port_type = port['type'] if port_type == 'knob': self.initator = False port['adj'].set_value(port_value) self.initator = True elif port_type == 'toggle': if port_value > 0.0: toggled = True else: toggled = False self.initator = False port['widget'].set_active(toggled) self.initator = True def on_show(self): self.window.show_all() def on_hide(self): self.window.hide_all() def on_quit(self): gtk.main_quit() def on_window_closed(self, arg): self.send_exiting() gtk.main_quit() def main(): filter_ui(sys.argv).run() #print "main done" if __name__ == '__main__': main() lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/0000755000175000017500000000000011703032664017657 5ustar alessioalessiolv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/DirWatch.py0000664000175000017500000001361410771525014021745 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! 
/usr/bin/env python # encoding: utf-8 # Matthias Jahn , 2006 "DirWatch chooses a supported backend (fam, gamin or fallback) it is mainly a wrapper script without own methods beside this" from Params import debug import GaminAdaptor, FamAdaptor, FallbackAdaptor import os class WatchObject: def __init__( self, idxName, namePath, isDir, callBackThis, handleEvents ): """watch object to handle a watch @param idxName: unique name for ref @param dirList: path to watch @param isDir: directory True or False @param callBackThis: is called if something in dirs in dirlist has events (handleEvents) callBackThis(idxName, changedFilePath) @param handleEvents: events to handle possible are 'changed', 'deleted', 'created', 'exist' suspendDirWatch after a handled change """ self.__adaptor = None self.__fr = None self.__idxName = idxName self.__name = namePath self.__isDir = isDir self.__callBackThis = callBackThis self.__handleEvents = handleEvents def __del__( self ): self.unwatch() def watch( self, adaptor ): """start watching @param adaptor: dirwatch adaptor for backend """ self.__adaptor = adaptor if self.__fr != None: self.unwatch() if self.__isDir: self.__fr = self.__adaptor.watch_directory( self.__name, self.__idxName ) else: self.__fr = self.__adaptor.watch_file( self.__name, self.__idxName ) def unwatch( self ): """stop watching""" if self.__fr: self.__fr = self.__adaptor.stop_watch( self.__name ) def get_events( self ): """returns all events to care""" return self.__handleEvents def get_callback( self ): """returns the callback methode""" return self.__callBackThis def get_fullpath( self, fileName ): """returns the full path dir + filename""" return os.path.join( self.__name, fileName ) def __str__( self ): if self.__isDir: return 'DIR %s: ' % self.__name else: return 'FILE %s: ' % self.__name class DirectoryWatcher: """DirWatch chooses a supported backend (fam, gamin or fallback) it is mainly a wrapper script without own methods beside this """ def __init__( self ): 
self.__adaptor = None self.__watcher = {} self.__loops = True self.connect() def __del__ ( self ): self.disconnect() def __raise_disconnected( self ): raise( "Already disconnected" ) def disconnect( self ): if self.__adaptor: self.suspend_all_watch() self.__adaptor = None def connect( self ): if self.__adaptor: self.disconnect() if FamAdaptor.support: debug( "using FamAdaptor" ) self.__adaptor = FamAdaptor.FamAdaptor( self.__processDirEvents ) if self.__adaptor == None: raise "something is strange" elif GaminAdaptor.support: debug( "using GaminAdaptor" ) self.__adaptor = GaminAdaptor.GaminAdaptor(self.__processDirEvents) else: debug( "using FallbackAdaptor" ) self.__adaptor = FallbackAdaptor.FallbackAdaptor(self.__processDirEvents) def add_watch( self, idxName, callBackThis, dirList, handleEvents = ['changed', 'deleted', 'created'] ): """add dirList to watch. @param idxName: unique name for ref @param callBackThis: is called if something in dirs in dirlist has events (handleEvents) callBackThis(idxName, changedFilePath) @param dirList: list of dirs to watch @param handleEvents: events to handle possible are 'changed', 'deleted', 'created', 'exist' suspendDirWatch after a handled change """ self.remove_watch( idxName ) self.__watcher[idxName] = [] for directory in dirList: watchObject = WatchObject( idxName, os.path.abspath( directory ), 1, callBackThis, handleEvents ) self.__watcher[idxName].append( watchObject ) self.resume_watch( idxName ) def remove_watch( self, idxName ): """remove DirWatch with name idxName""" if self.__watcher.has_key( idxName ): self.suspend_watch( idxName ) del self.__watcher[idxName] def remove_all_watch( self ): """remove all DirWatcher""" self.__watcher = {} def suspend_watch( self, idxName ): """suspend DirWatch with name idxName. 
No dir/filechanges will be reacted until resume""" if self.__watcher.has_key( idxName ): for watchObject in self.__watcher[idxName]: watchObject.unwatch() def suspend_all_watch( self ): """suspend all DirWatcher ... they could be resumed with resume_all_watch""" for idxName in self.__watcher.keys(): self.suspend_watch( idxName ) def resume_watch( self, idxName ): """resume a DirWatch that was supended with suspendDirWatch or suspendAllDirWatch""" for watchObject in self.__watcher[idxName]: watchObject.watch( self.__adaptor ) def resume_all_watch( self ): """ resume all DirWatcher""" for idxName in self.__watcher.keys(): self.resume_watch( idxName ) def __processDirEvents( self, pathName, event, idxName ): if event in self.__watcher[idxName][0].get_events(): #self.disconnect() self.suspend_watch(idxName) __watcher = self.__watcher[idxName][0] __watcher.get_callback()( idxName, __watcher.get_fullpath( pathName ), event ) #self.connect() self.resume_watch( idxName ) def request_end_loop( self ): """sets a flag that stops the loop. it do not stop the loop directly!""" self.__loops = False def loop( self ): """wait for dir events and start handling of them""" try: self.__loops = True while ( self.__loops ) and ( self.__adaptor != None ) : self.__adaptor.wait_for_event() while self.__adaptor.event_pending(): self.__adaptor.handle_events() if not self.__loops: break except KeyboardInterrupt: self.request_end_loop() # quick test # class Test: def __init__( self ): self.fam_test = DirectoryWatcher() self.fam_test.add_watch( "tmp Test", self.thisIsCalledBack, ["/tmp"] ) self.fam_test.loop() # self.fam_test.loop() def thisIsCalledBack( self, idxName, pathName, event ): print "idxName=%s, Path=%s, Event=%s " % ( idxName, pathName, event ) self.fam_test.resume_watch( idxName ) if __name__ == "__main__": Test() lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Node.py0000664000175000017500000003177710771525014021137 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! 
/usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2005 (ita) """ Node: filesystem structure, contains lists of nodes self.m_dirs : sub-folders self.m_files : files existing in the src dir self.m_build : nodes produced in the build dirs A folder is represented by exactly one node IMPORTANT: Some would-be class properties are stored in Build: nodes to depend on, signature, flags, .. In fact, unused class members increase the .wafpickle file size sensibly with lots of objects eg: the m_tstamp is used for every node, while the signature is computed only for build files the build is launched from the top of the build dir (for example, in _build_/) """ import os import Params, Utils from Params import debug, error, fatal class Node(object): def __init__(self, name, parent): self.m_name = name self.m_parent = parent self.m_cached_path = "" self.hash_value = None # Lookup dictionaries for O(1) access self.m_dirs_lookup = {} self.m_files_lookup = {} self.m_build_lookup = {} # The checks below could be disabled for speed, if necessary # TODO check for . .. 
/ \ in name # Node name must contain only one level if Utils.split_path(name)[0] != name: fatal('name forbidden '+name) if parent: if parent.get_file(name): fatal('node %s exists in the parent files %s already' % (name, str(parent))) if parent.get_build(name): fatal('node %s exists in the parent build %s already' % (name, str(parent))) def __str__(self): if self.m_name in self.m_parent.m_build_lookup: isbld = "bld" elif self.m_name in self.m_parent.m_dirs_lookup: isbld = "dir" else: isbld = "src" return "%s://%s" % (isbld, self.abspath()) def __repr__(self): if self.m_name in self.m_parent.m_build_lookup: isbld = "bld" elif self.m_name in self.m_parent.m_dirs_lookup: isbld = "dir" else: isbld = "src" return "%s://%s" % (isbld, self.abspath()) def __eq__(self, other): # avoid collisions by looking at the parents if not self.m_parent: if other.m_parent: return 0 elif self.m_parent.hash_value != other.m_parent.hash_value: return 0 return self.m_name == other.m_name def __ne__(self, other): return not self.__eq__(other) def __hash__(self): 'hash value based on the abs path' if not self.hash_value: cur = self lst = [] while cur: lst.append(cur.m_name) cur = cur.m_parent if lst[-1] == '': lst = lst[:-1] if lst[0] =='/': lst = lst[1:] lst.reverse() val = os.path.join(*lst) debug("[%s]" % val, 'node') self.hash_value = hash(val) return self.hash_value # TODO deprecated, remove this function def equals(self, node): return self.hash_value == node.hash_value def dirs(self): return self.m_dirs_lookup.values() def get_dir(self, name, default=None): return self.m_dirs_lookup.get(name, default) def append_dir(self, dir): self.m_dirs_lookup[dir.m_name] = dir def files(self): return self.m_files_lookup.values() def get_file(self, name, default=None): return self.m_files_lookup.get(name, default) def append_file(self, dir): self.m_files_lookup[dir.m_name] = dir def get_build(self, name, default=None): return self.m_build_lookup.get(name, default) # for the build variants, the same 
nodes are used to save memory # the timestamps/signatures are accessed using the following methods def get_tstamp_variant(self, variant): vars = Params.g_build.m_tstamp_variants[variant] try: return vars[variant] except KeyError: return None def set_tstamp_variant(self, variant, value): Params.g_build.m_tstamp_variants[variant][self] = value def get_tstamp_node(self): try: return Params.g_build.m_tstamp_variants[0][self] except KeyError: return None def set_tstamp_node(self, value): Params.g_build.m_tstamp_variants[0][self] = value ## ===== BEGIN find methods ===== ## def find_build(self, path, create=0): #print "find build", path lst = Utils.split_path(path) return self.find_build_lst(lst, create) def find_build_lst(self, lst, create=0): "search a source or a build node in the filesystem, rescan intermediate folders, create if necessary" rescan = Params.g_build.rescan current = self while lst: rescan(current) name = lst.pop(0) prev = current if name == '.': continue elif name == '..': current = current.m_parent continue if lst: current = prev.m_dirs_lookup.get(name, None) if not current and create: current = Node(name, prev) prev.m_dirs_lookup[name] = current else: current = prev.m_build_lookup.get(name, None) # next line for finding source files too if not current: current = prev.m_files_lookup.get(name, None) # TODO do not use this for finding folders if not current: current = Node(name, prev) # last item is the build file (rescan would have found the source) prev.m_build_lookup[name] = current return current def find_source(self, path, create=1): lst = Utils.split_path(path) return self.find_source_lst(lst, create) def find_source_lst(self, lst, create=1): "search a source in the filesystem, rescan intermediate folders, create intermediate folders if necessary" rescan = Params.g_build.rescan current = self while lst: rescan(current) name = lst.pop(0) prev = current if name == '.': continue elif name == '..': current = current.m_parent continue if lst: current = 
prev.m_dirs_lookup.get(name, None) if not current and create: # create a directory current = Node(name, prev) prev.m_dirs_lookup[name] = current else: current = prev.m_files_lookup.get(name, None) # try hard to find something if not current: current = prev.m_dirs_lookup.get(name, None) if not current: current = prev.m_build_lookup.get(name, None) if not current: return None return current def find_raw(self, path): lst = Utils.split_path(path) return self.find_raw_lst(lst) def find_raw_lst(self, lst): "just find a node in the tree, do not rescan folders" current = self while lst: name = lst.pop(0) prev = current if name == '.': continue elif name == '..': current = current.m_parent continue current = prev.m_dirs_lookup[name] if not current: current=prev.m_files_lookup[name] if not current: current=prev.m_build_lookup[name] if not current: return None return current def ensure_node_from_lst(self, plst): curnode = self for dirname in plst: if not dirname: continue if dirname == '.': continue if dirname == '..': curnode = curnode.m_parent continue #found=None found = curnode.get_dir(dirname, None) #for cand in curnode.m_dirs: # if cand.m_name == dirname: # found = cand # break if not found: found = Node(dirname, curnode) curnode.append_dir(found) curnode = found return curnode def find_dir(self, path): lst = Utils.split_path(path) return self.find_dir_lst(lst) def find_dir_lst(self, lst): "search a folder in the filesystem, do not scan, create if necessary" current = self while lst: name = lst.pop(0) prev = current if name == '.': continue elif name == '..': current = current.m_parent else: current = prev.m_dirs_lookup.get(name, None) if not current: current = Node(name, prev) # create a directory prev.m_dirs_lookup[name] = current return current ## ===== END find methods ===== ## ## ===== BEGIN relpath-related methods ===== ## # same as pathlist3, but do not append './' at the beginning def pathlist4(self, node): #print "pathlist4 called" if self == node: return [] if 
self.m_parent == node: return [self.m_name] return [self.m_name, os.sep] + self.m_parent.pathlist4(node) def relpath(self, parent): "path relative to a direct parent, as string" lst = [] p = self h1 = parent.height() h2 = p.height() while h2 > h1: h2 -= 1 lst.append(p.m_name) p = p.m_parent if lst: lst.reverse() ret = os.path.join(*lst) else: ret = '' return ret # find a common ancestor for two nodes - for the shortest path in hierarchy def find_ancestor(self, node): dist = self.height() - node.height() if dist < 0: return node.find_ancestor(self) # now the real code cand = self while dist > 0: cand = cand.m_parent dist -= 1 if cand == node: return cand cursor = node while cand.m_parent: cand = cand.m_parent cursor = cursor.m_parent if cand == cursor: return cand # prints the amount of "../" between two nodes def invrelpath(self, parent): lst = [] cand = self while not cand == parent: cand = cand.m_parent lst += ['..', os.sep] return lst # TODO: do this in a single function (this one uses invrelpath, find_ancestor and pathlist4) # string representing a relative path between two nodes, we are at relative_to def relpath_gen(self, going_to): if self == going_to: return '.' if going_to.m_parent == self: return '..' 
# up_path is '../../../' and down_path is 'dir/subdir/subdir/file' ancestor = self.find_ancestor(going_to) up_path = going_to.invrelpath(ancestor) down_path = self.pathlist4(ancestor) down_path.reverse() return "".join(up_path + down_path) def nice_path(self, env=None): "printed in the console, open files easily from the launch directory" tree = Params.g_build ln = tree.launch_node() name = self.m_name x = self.m_parent.get_file(name) if x: return self.relative_path(ln) else: return os.path.join(tree.m_bldnode.relative_path(ln), env.variant(), self.relative_path(tree.m_srcnode)) def relative_path(self, folder): "relative path between a node and a directory node" hh1 = h1 = self.height() hh2 = h2 = folder.height() p1 = self p2 = folder while h1 > h2: p1 = p1.m_parent h1 -= 1 while h2 > h1: p2 = p2.m_parent h2 -= 1 # now we have two nodes of the same height ancestor = None if p1.m_name == p2.m_name: ancestor = p1 while p1.m_parent: p1 = p1.m_parent p2 = p2.m_parent if p1.m_name != p2.m_name: ancestor = None elif not ancestor: ancestor = p1 anh = ancestor.height() n1 = hh1-anh n2 = hh2-anh lst = [] tmp = self while n1: n1 -= 1 lst.append(tmp.m_name) tmp = tmp.m_parent lst.reverse() up_path = os.sep.join(lst) down_path = (".."+os.sep) * n2 return "".join(down_path + up_path) ## ===== END relpath-related methods ===== ## def debug(self): print "========= debug node =============" print "dirs are ", self.dirs() print "files are", self.files() print "======= end debug node ===========" def is_child_of(self, node): "does this node belong to the subtree node" p = self diff = self.height() - node.height() while diff > 0: diff -= 1 p = p.m_parent return p == node def variant(self, env): "variant, or output directory for this node, a source has for variant 0" if not env: return 0 i = self.m_parent.get_file(self.m_name) if i: return 0 return env.variant() def size_subtree(self): "for debugging, returns the amount of subnodes" l_size = 1 for i in self.dirs(): l_size += 
i.size_subtree() l_size += len(self.files()) return l_size def height(self): "amount of parents" # README a cache can be added here if necessary d = self val = 0 while d.m_parent: d = d.m_parent val += 1 return val # helpers for building things def abspath(self, env=None): "absolute path" variant = self.variant(env) try: ret = Params.g_build.m_abspath_cache[variant][self] return ret except KeyError: if not variant: cur = self lst = [] while cur: lst.append(cur.m_name) cur = cur.m_parent lst.reverse() val = os.path.join(*lst) else: val = os.path.join(Params.g_build.m_bldnode.abspath(), env.variant(), self.relpath(Params.g_build.m_srcnode)) Params.g_build.m_abspath_cache[variant][self]=val return val def change_ext(self, ext): "node of the same path, but with a different extension" name = self.m_name k = name.rfind('.') if k >= 0: newname = name[:k] + ext else: newname = name + ext p = self.m_parent n = p.m_files_lookup.get(newname, None) if not n: n = p.m_build_lookup.get(newname, None) if n: return n newnode = Node(newname, p) p.m_build_lookup[newnode.m_name] = newnode return newnode def bld_dir(self, env): "build path without the file name" return self.m_parent.bldpath(env) def bldbase(self, env): "build path without the extension: src/dir/foo(.cpp)" l = len(self.m_name) n = self.m_name while l > 0: l -= 1 if n[l] == '.': break s = n[:l] return os.path.join(self.bld_dir(env), s) def bldpath(self, env=None): "path seen from the build dir default/src/foo.cpp" x = self.m_parent.get_file(self.m_name) if x: return self.relpath_gen(Params.g_build.m_bldnode) if self.relpath(Params.g_build.m_srcnode) is not '': return os.path.join(env.variant(), self.relpath(Params.g_build.m_srcnode)) return env.variant() def srcpath(self, env): "path in the srcdir from the build dir ../src/foo.cpp" x = self.m_parent.get_build(self.m_name) if x: return self.bldpath(env) return self.relpath_gen(Params.g_build.m_bldnode) 
lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/FamAdaptor.py0000664000175000017500000000407510771525014022257 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # Matthias Jahn 2006 """Fam WatchMonitor depends on python-fam ... it works with fam or gamin demon""" import select, errno try: import _fam except ImportError: support = False else: # check if fam runs and accepts connections test = _fam.open() test.close() test = None support = True class FamAdaptor: """fam helper class for use with DirWatcher""" def __init__( self, eventHandler ): """ creates the fam adaptor class @param eventHandler: callback method for event handling""" self.__fam = _fam.open() self.__eventHandler = eventHandler # callBack function self.__watchHandler = {} # {name : famId} def __del__( self ): if self.__fam: for handle in self.__watchHandler.keys(): self.stop_watch( handle ) self.__fam.close() def __check_fam(self): if self.__fam == None: raise "fam not init" def watch_directory( self, name, idxName ): self.__check_fam() if self.__watchHandler.has_key( name ): raise "dir allready watched" # set famId self.__watchHandler[name] = self.__fam.monitorDirectory( name, idxName ) return(self.__watchHandler[name]) def watch_file( self, name, idxName ): self.__check_fam() if self.__watchHandler.has_key( name ): raise "file allready watched" # set famId self.__watchHandler[name] = self.__fam.monitorFile( name, idxName ) return(self.__watchHandler[name]) def stop_watch( self, name ): self.__check_fam() if self.__watchHandler.has_key( name ): self.__watchHandler[name].cancelMonitor() del self.__watchHandler[name] return None def wait_for_event( self ): self.__check_fam() try: select.select( [self.__fam], [], [] ) except select.error, er: errnumber, strerr = er if errnumber != errno.EINTR: raise strerr def event_pending( self ): self.__check_fam() return self.__fam.pending() def handle_events( self ): self.__check_fam() fe = self.__fam.nextEvent() 
#pathName, event, idxName self.__eventHandler(fe.filename, fe.code2str(), fe.userData) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/GaminAdaptor.py0000664000175000017500000000524610771525014022610 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # Oscar Blumberg 2006 (nael) # Matthias Jahn """Depends on python gamin and on gamin demon""" import select, errno try: import gamin except ImportError: support = False else: # check if gamin runs and accepts connections test = gamin.WatchMonitor() test.disconnect() test = None support = True class GaminAdaptor: """gamin helper class for use with DirWatcher""" def __init__( self, eventHandler ): """ creates the gamin wrapper @param eventHandler: callback method for event handling""" self.__gamin = gamin.WatchMonitor() self.__eventHandler = eventHandler # callBack function self.__watchHandler = {} # {name : famId} def __del__( self ): """clean remove""" if self.__gamin: for handle in self.__watchHandler.keys(): self.stop_watch( handle ) self.__gamin.disconnect() self.__gamin = None def __check_gamin(self): """is gamin connected""" if self.__gamin == None: raise "gamin not init" def __code2str( self, event ): """convert event numbers to string""" gaminCodes = { 1:"changed", 2:"deleted", 3:"StartExecuting", 4:"StopExecuting", 5:"created", 6:"moved", 7:"acknowledge", 8:"exists", 9:"endExist" } try: return gaminCodes[event] except KeyError: return "unknown" def __eventhandler_helper(self, pathName, event, idxName): """local eventhandler helps to convert event numbers to string""" self.__eventHandler(pathName, self.__code2str(event), idxName) def watch_directory( self, name, idxName ): self.__check_gamin() if self.__watchHandler.has_key( name ): raise "dir allready watched" # set gaminId self.__watchHandler[name] = self.__gamin.watch_directory( name, self.__eventhandler_helper, idxName ) return(self.__watchHandler[name]) def watch_file( self, name, idxName ): 
self.__check_gamin() if self.__watchHandler.has_key( name ): raise "file allready watched" # set famId self.__watchHandler[name] = self.__gamin.watch_directory( name, self.__eventhandler_helper, idxName ) return(self.__watchHandler[name]) def stop_watch( self, name ): self.__check_gamin() if self.__watchHandler.has_key( name ): self.__gamin.stop_watch(name) del self.__watchHandler[name] return None def wait_for_event( self ): self.__check_gamin() try: select.select([self.__gamin.get_fd()], [], []) except select.error, er: errnumber, strerr = er if errnumber != errno.EINTR: raise strerr def event_pending( self ): self.__check_gamin() return self.__gamin.event_pending() def handle_events( self ): self.__check_gamin() self.__gamin.handle_events() lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/0000755000175000017500000000000011703032664020757 5ustar alessioalessiolv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/gcc.py0000664000175000017500000001341310771525013022070 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2006-2008 (ita) # Ralf Habacker, 2006 (rh) import os, optparse, sys import Params, Configure import ccroot, ar STOP = "stop" CONTINUE = "continue" """ Configuration issues: The first problem is that some exceptions are critical (compiler not found, ..) 
while others are not (the ar program is only needed for static libraries) The second problem is about the branching: how to extend the configuration functions without hard-coding the names and calling the functions A third problem is to reuse the code and not copy-paste everything each time a new compiler is added The refactoring will be performed in three steps: 1 the code will be split into small functions 2 the irrelevant variables will be eliminated 3 a stack-based system will be used for calling the configuration functions 4 the user logic will go into the error recovery (for example, making some errors non-fatal) Another solution to avoid an excessive amount of configuration variables is to create platform-specific methods, in this case the following problems must be solved first: attach functions dynamically to the c/c++ classes (without importing cxx.py or cc.py) """ def on_error(func_name, exc): if func_name == 'not_critical': env['foo'] = 'blah' return CONTINUE return STOP def eval_rules(conf, rules, err_handler): for x in rules: try: # TODO check pre/post conditions x(conf) except Exception, e: raise if err_handler(x.__name__, e) == STOP: break else: raise def find_cc(conf): v = conf.env cc = None if v['CC']: cc = v['CC'] elif 'CC' in os.environ: cc = os.environ['CC'] if not cc: cc = conf.find_program('gcc', var='CC') if not cc: cc = conf.find_program('cc', var='CC') if not cc: conf.fatal('gcc was not found') v['CC'] = cc def common_flags(conf): v = conf.env # CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS _LIBDIRFLAGS _LIBFLAGS v['CC_SRC_F'] = '' v['CC_TGT_F'] = '-c -o ' v['CPPPATH_ST'] = '-I%s' # template for adding include paths # linker if not v['LINK_CC']: v['LINK_CC'] = v['CC'] v['CCLNK_SRC_F'] = '' v['CCLNK_TGT_F'] = '-o ' v['LIB_ST'] = '-l%s' # template for adding libs v['LIBPATH_ST'] = '-L%s' # template for adding libpaths v['STATICLIB_ST'] = '-l%s' v['STATICLIBPATH_ST'] = '-L%s' v['CCDEFINES_ST'] = '-D%s' v['SHLIB_MARKER'] = '-Wl,-Bdynamic' 
v['STATICLIB_MARKER'] = '-Wl,-Bstatic' # program v['program_PATTERN'] = '%s' # shared library v['shlib_CCFLAGS'] = ['-fPIC', '-DPIC'] v['shlib_LINKFLAGS'] = ['-shared'] v['shlib_PATTERN'] = 'lib%s.so' # static lib v['staticlib_LINKFLAGS'] = ['-Wl,-Bstatic'] v['staticlib_PATTERN'] = 'lib%s.a' def modifier_win32(conf): v = conf.env v['program_PATTERN'] = '%s.exe' v['shlib_PATTERN'] = 'lib%s.dll' v['shlib_CCFLAGS'] = [] v['staticlib_LINKFLAGS'] = [] def modifier_cygwin(conf): v = conf.env v['program_PATTERN'] = '%s.exe' v['shlib_PATTERN'] = 'lib%s.dll' v['shlib_CCFLAGS'] = [] v['staticlib_LINKFLAGS'] = [] def modifier_darwin(conf): v = conf.env v['shlib_CCFLAGS'] = ['-fPIC'] v['shlib_LINKFLAGS'] = ['-dynamiclib'] v['shlib_PATTERN'] = 'lib%s.dylib' v['staticlib_LINKFLAGS'] = [] v['SHLIB_MARKER'] = '' v['STATICLIB_MARKER'] = '' def modifier_aix5(conf): v = conf.env v['program_LINKFLAGS'] = ['-Wl,-brtl'] v['shlib_LINKFLAGS'] = ['-shared','-Wl,-brtl,-bexpfull'] v['SHLIB_MARKER'] = '' def modifier_plugin(conf): v = conf.env # TODO this will disappear somehow # plugins. 
We handle them exactly as shlibs # everywhere except on osx, where we do bundles if sys.platform == 'darwin': v['plugin_LINKFLAGS'] = ['-bundle', '-undefined dynamic_lookup'] v['plugin_CCFLAGS'] = ['-fPIC'] v['plugin_PATTERN'] = '%s.bundle' else: v['plugin_CCFLAGS'] = v['shlib_CCFLAGS'] v['plugin_LINKFLAGS'] = v['shlib_LINKFLAGS'] v['plugin_PATTERN'] = v['shlib_PATTERN'] def modifier_debug(conf): v = conf.env # compiler debug levels if conf.check_flags('-O2'): v['CCFLAGS_OPTIMIZED'] = ['-O2'] v['CCFLAGS_RELEASE'] = ['-O2'] if conf.check_flags('-g -DDEBUG'): v['CCFLAGS_DEBUG'] = ['-g', '-DDEBUG'] if conf.check_flags('-g3 -O0 -DDEBUG'): v['CCFLAGS_ULTRADEBUG'] = ['-g3', '-O0', '-DDEBUG'] if conf.check_flags('-Wall'): for x in 'OPTIMIZED RELEASE DEBUG ULTRADEBUG'.split(): v.append_unique('CCFLAGS_'+x, '-Wall') try: debug_level = Params.g_options.debug_level.upper() except AttributeError: debug_level = ccroot.DEBUG_LEVELS.CUSTOM v.append_value('CCFLAGS', v['CCFLAGS_'+debug_level]) def detect(conf): # TODO FIXME later it will start from eval_rules # funcs = [find_cc, find_cpp, find_ar, common_flags, modifier_win32] #eval_rules(conf, funcs, on_error) find_cc(conf) ar.find_cpp(conf) ar.find_ar(conf) conf.check_tool('cc') common_flags(conf) if sys.platform == 'win32': modifier_win32(conf) elif sys.platform == 'cygwin': modifier_cygwin(conf) elif sys.platform == 'darwin': modifier_darwin(conf) elif sys.platform == 'aix5': modifier_aix5(conf) modifier_plugin(conf) conf.check_tool('checks') conf.check_features() modifier_debug(conf) conf.add_os_flags('CFLAGS', 'CCFLAGS') conf.add_os_flags('CPPFLAGS') conf.add_os_flags('LINKFLAGS') def set_options(opt): try: opt.add_option('-d', '--debug-level', action = 'store', default = ccroot.DEBUG_LEVELS.RELEASE, help = "Specify the debug level, does nothing if CFLAGS is set in the environment. 
[Allowed Values: '%s']" % "', '".join(ccroot.DEBUG_LEVELS.ALL), choices = ccroot.DEBUG_LEVELS.ALL, dest = 'debug_level') except optparse.OptionConflictError: # the g++ tool might have added that option already pass lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/g++.py0000664000175000017500000001046210771525013021711 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2006 (ita) # Ralf Habacker, 2006 (rh) import os, optparse, sys import Params, Configure import ccroot, ar def find_cxx(conf): v = conf.env cc = None if v['CXX']: cc = v['CXX'] elif 'CXX' in os.environ: cc = os.environ['CXX'] if not cc: cc = conf.find_program('g++', var='CXX') if not cc: cc = conf.find_program('c++', var='CXX') if not cc: conf.fatal('g++ was not found') v['CXX'] = cc def common_flags(conf): v = conf.env # CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS _LIBDIRFLAGS _LIBFLAGS v['CXX_SRC_F'] = '' v['CXX_TGT_F'] = '-c -o ' v['CPPPATH_ST'] = '-I%s' # template for adding include paths # linker if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX'] v['CXXLNK_SRC_F'] = '' v['CXXLNK_TGT_F'] = '-o ' v['LIB_ST'] = '-l%s' # template for adding libs v['LIBPATH_ST'] = '-L%s' # template for adding libpaths v['STATICLIB_ST'] = '-l%s' v['STATICLIBPATH_ST'] = '-L%s' v['CXXDEFINES_ST'] = '-D%s' v['SHLIB_MARKER'] = '-Wl,-Bdynamic' v['STATICLIB_MARKER'] = '-Wl,-Bstatic' # program v['program_PATTERN'] = '%s' # shared library v['shlib_CXXFLAGS'] = ['-fPIC', '-DPIC'] v['shlib_LINKFLAGS'] = ['-shared'] v['shlib_PATTERN'] = 'lib%s.so' # static lib v['staticlib_LINKFLAGS'] = ['-Wl,-Bstatic'] v['staticlib_PATTERN'] = 'lib%s.a' def modifier_win32(conf): v = conf.env v['program_PATTERN'] = '%s.exe' v['shlib_PATTERN'] = 'lib%s.dll' v['shlib_CXXFLAGS'] = [''] v['staticlib_LINKFLAGS'] = [''] def modifier_cygwin(conf): v = conf.env v['program_PATTERN'] = '%s.exe' v['shlib_PATTERN'] = 'lib%s.dll' v['shlib_CXXFLAGS'] = [''] v['staticlib_LINKFLAGS'] = [''] def 
modifier_darwin(conf): v = conf.env v['shlib_CXXFLAGS'] = ['-fPIC'] v['shlib_LINKFLAGS'] = ['-dynamiclib'] v['shlib_PATTERN'] = 'lib%s.dylib' v['staticlib_LINKFLAGS'] = [''] v['SHLIB_MARKER'] = '' v['STATICLIB_MARKER'] = '' def modifier_aix5(conf): v = conf.env v['program_LINKFLAGS'] = ['-Wl,-brtl'] v['shlib_LINKFLAGS'] = ['-shared','-Wl,-brtl,-bexpfull'] v['SHLIB_MARKER'] = '' def modifier_plugin(conf): v = conf.env # TODO this will disappear somehow # plugins. We handle them exactly as shlibs # everywhere except on osx, where we do bundles if sys.platform == 'darwin': v['plugin_LINKFLAGS'] = ['-bundle', '-undefined dynamic_lookup'] v['plugin_CXXFLAGS'] = ['-fPIC'] v['plugin_PATTERN'] = '%s.bundle' else: v['plugin_CXXFLAGS'] = v['shlib_CXXFLAGS'] v['plugin_LINKFLAGS'] = v['shlib_LINKFLAGS'] v['plugin_PATTERN'] = v['shlib_PATTERN'] def modifier_debug(conf): v = conf.env # compiler debug levels if conf.check_flags('-O2'): v['CXXFLAGS_OPTIMIZED'] = ['-O2'] v['CXXFLAGS_RELEASE'] = ['-O2'] if conf.check_flags('-g -DDEBUG'): v['CXXFLAGS_DEBUG'] = ['-g', '-DDEBUG'] if conf.check_flags('-g3 -O0 -DDEBUG'): v['CXXFLAGS_ULTRADEBUG'] = ['-g3', '-O0', '-DDEBUG'] if conf.check_flags('-Wall'): for x in 'OPTIMIZED RELEASE DEBUG ULTRADEBUG'.split(): v.append_unique('CXXFLAGS_'+x, '-Wall') try: debug_level = Params.g_options.debug_level.upper() except AttributeError: debug_level = ccroot.DEBUG_LEVELS.CUSTOM v.append_value('CXXFLAGS', v['CXXFLAGS_'+debug_level]) def detect(conf): find_cxx(conf) ar.find_cpp(conf) ar.find_ar(conf) conf.check_tool('cxx') common_flags(conf) if sys.platform == 'win32': modifier_win32(conf) elif sys.platform == 'cygwin': modifier_cygwin(conf) elif sys.platform == 'darwin': modifier_darwin(conf) elif sys.platform == 'aix5': modifier_aix5(conf) modifier_plugin(conf) conf.check_tool('checks') conf.check_features(kind='cpp') modifier_debug(conf) conf.add_os_flags('CXXFLAGS') conf.add_os_flags('CPPFLAGS') conf.add_os_flags('LINKFLAGS') def set_options(opt): 
try: opt.add_option('-d', '--debug-level', action = 'store', default = ccroot.DEBUG_LEVELS.RELEASE, help = "Specify the debug level, does nothing if CXXFLAGS is set in the environment. [Allowed Values: '%s']" % "', '".join(ccroot.DEBUG_LEVELS.ALL), choices = ccroot.DEBUG_LEVELS.ALL, dest = 'debug_level') except optparse.OptionConflictError: # the gcc tool might have added that option already pass lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/suncc.py0000664000175000017500000000627210771525013022454 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2006 (ita) # Ralf Habacker, 2006 (rh) import os, optparse import Utils, Action, Params, Configure import ccroot, ar def find_cc(conf): v = conf.env cc = None if v['CC']: cc = v['CC'] elif 'CC' in os.environ: cc = os.environ['CC'] #if not cc: cc = conf.find_program('gcc', var='CC') if not cc: cc = conf.find_program('cc', var='CC') if not cc: conf.fatal('suncc was not found') v['CC'] = cc #TODO: Has anyone a better idea to check if this is a sun cc? 
ret = os.popen("%s -flags" % cc).close() if ret: conf.check_message('suncc', '', not ret) return def common_flags(conf): v = conf.env # CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS _LIBDIRFLAGS _LIBFLAGS v['CC_SRC_F'] = '' v['CC_TGT_F'] = '-c -o ' v['CPPPATH_ST'] = '-I%s' # template for adding include paths # linker if not v['LINK_CC']: v['LINK_CC'] = v['CC'] v['CCLNK_SRC_F'] = '' v['CCLNK_TGT_F'] = '-o ' v['LIB_ST'] = '-l%s' # template for adding libs v['LIBPATH_ST'] = '-L%s' # template for adding libpaths v['STATICLIB_ST'] = '-l%s' v['STATICLIBPATH_ST'] = '-L%s' v['CCDEFINES_ST'] = '-D%s' v['SHLIB_MARKER'] = '-Bdynamic' v['STATICLIB_MARKER'] = '-Bstatic' # program v['program_PATTERN'] = '%s' # shared library v['shlib_CCFLAGS'] = ['-Kpic', '-DPIC'] v['shlib_LINKFLAGS'] = ['-G'] v['shlib_PATTERN'] = 'lib%s.so' # static lib v['staticlib_LINKFLAGS'] = ['-Bstatic'] v['staticlib_PATTERN'] = 'lib%s.a' v['plugin_CCFLAGS'] = v['shlib_CCFLAGS'] v['plugin_LINKFLAGS'] = v['shlib_LINKFLAGS'] v['plugin_PATTERN'] = v['shlib_PATTERN'] def modifier_debug(conf): v = conf.env # compiler debug levels v['CCFLAGS'] = ['-O'] if conf.check_flags('-O2'): v['CCFLAGS_OPTIMIZED'] = ['-O2'] v['CCFLAGS_RELEASE'] = ['-O2'] if conf.check_flags('-g -DDEBUG'): v['CCFLAGS_DEBUG'] = ['-g', '-DDEBUG'] if conf.check_flags('-g3 -O0 -DDEBUG'): v['CCFLAGS_ULTRADEBUG'] = ['-g3', '-O0', '-DDEBUG'] # see the option below try: debug_level = Params.g_options.debug_level.upper() except AttributeError: debug_level = ccroot.DEBUG_LEVELS.CUSTOM v.append_value('CCFLAGS', v['CCFLAGS_'+debug_level]) def detect(conf): # TODO FIXME later it will start from eval_rules # funcs = [find_cc, find_cpp, find_ar, common_flags, modifier_win32] #eval_rules(conf, funcs, on_error) find_cc(conf) ar.find_cpp(conf) ar.find_ar(conf) conf.check_tool('cc') common_flags(conf) #modifier_plugin(conf) conf.check_tool('checks') conf.check_features() modifier_debug(conf) conf.add_os_flags('CFLAGS', 'CCFLAGS') conf.add_os_flags('CPPFLAGS') 
conf.add_os_flags('LINKFLAGS') def set_options(opt): try: opt.add_option('-d', '--debug-level', action = 'store', default = ccroot.DEBUG_LEVELS.RELEASE, help = "Specify the debug level, does nothing if CFLAGS is set in the environment. [Allowed Values: '%s']" % "', '".join(ccroot.DEBUG_LEVELS.ALL), choices = ccroot.DEBUG_LEVELS.ALL, dest = 'debug_level') except optparse.OptionConflictError: # the sunc++ tool might have added that option already pass lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/compiler_cc.py0000664000175000017500000000336310771525013023616 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # Matthias Jahn , 2007 (pmarat) import os, sys, imp, types import optparse import Utils, Action, Params, checks, Configure def __list_possible_compiler(plattform): c_compiler = { 'win32': ['msvc', 'gcc'], 'cygwin': ['gcc'], 'darwin': ['gcc'], 'aix5': ['gcc'], 'linux': ['gcc', 'suncc'], 'sunos': ['suncc', 'gcc'], 'irix': ['gcc'], 'hpux': ['gcc'], 'default': ['gcc'] } try: return c_compiler[plattform] except KeyError: return c_compiler["default"] def detect(conf): try: test_for_compiler = Params.g_options.check_c_compiler except AttributeError: raise Configure.ConfigurationError("Add set_options(opt): opt.tool_options('compiler_cc')") for c_compiler in test_for_compiler.split(): conf.check_tool(c_compiler) if conf.env['CC']: conf.check_message("%s" %c_compiler, '', True) conf.env["COMPILER_CC"] = "%s" % c_compiler #store the choosed c compiler return conf.check_message("%s" %c_compiler, '', False) conf.env["COMPILER_CC"] = None def set_options(opt): detected_plattform = checks.detect_platform(None) possible_compiler_list = __list_possible_compiler(detected_plattform) test_for_compiler = str(" ").join(possible_compiler_list) cc_compiler_opts = opt.add_option_group("C Compiler Options") try: cc_compiler_opts.add_option('--check-c-compiler', default="%s" % test_for_compiler, help='On this platform (%s) the 
following C-Compiler will be checked by default: "%s"' % (detected_plattform, test_for_compiler), dest="check_c_compiler") except optparse.OptionConflictError: pass for c_compiler in test_for_compiler.split(): opt.tool_options('%s' % c_compiler, option_group=cc_compiler_opts) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/ocaml.py0000664000175000017500000002700510772264157022443 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2006 (ita) "ocaml support" import os, re import Params, Action, Object, Scan, Utils, Task from Params import error, fatal from Object import taskgen, feature, before, after, extension EXT_MLL = ['.mll'] EXT_MLY = ['.mly'] EXT_MLI = ['.mli'] EXT_MLC = ['.c'] EXT_ML = ['.ml'] open_re = re.compile('open ([a-zA-Z]+);;', re.M) def filter_comments(filename): f = open(filename, 'r') txt = f.read() f.close() buf = [] i = 0 max = len(txt) while i < max: c = txt[i] # skip a string if c == '"': i += 1 c = '' while i < max: p = c c = txt[i] i += 1 if i == max: return buf if c == '"': cnt = 0 while i < cnt and i < max: #print "cntcnt = ", str(cnt), self.txt[self.i-2-cnt] if txt[i-2-cnt] == '\\': cnt+=1 else: break #print "cnt is ", str(cnt) if (cnt%2)==0: break # skip a char - unfortunately caml is a bit special t' elif c == "'": i += 1 if i == max: return buf c = txt[i] if c == '\\': i += 1 if i == max: return buf c = txt[i] if c == 'x': i += 2 # skip two chars i += 1 if i == max: return buf c = txt[i] #if c != '\'': print "uh-oh, invalid character" # skip a comment elif c == '(': if i == max: break c = txt[i+1] # eat (* *) comments if c == '*': i += 1 nesting = 1 prev = 0 while i < max: c = txt[i] if c == '*': prev = 1 elif c == ')' and prev: if prev: nesting -= 1 if nesting == 0: break elif c == '(': prev = 0 if i == max: return buf i += 1 c = txt[i] if c == '*': nesting += 1 else: prev = 0 i += 1 # a valid char, add it to the buffer else: buf.append(c) i += 1 return buf class 
ocaml_link(Task.Task): """link tasks in ocaml are special, the command line calculation must be postponed until having the dependencies on the compilation tasks, this means that we must produce the .ml files (lex, ..) to decide the order on which to link the files""" def __init__(self, action_name, env, normal=1): Task.Task.__init__(self, action_name, env, normal) def may_start(self): if not getattr(self, 'order', ''): # now reorder the m_inputs given the task dependencies if getattr(self, 'bytecode', 0): alltasks = self.obj.bytecode_tasks else: alltasks = self.obj.native_tasks # this part is difficult, we do not have a total order on the tasks # if the dependencies are wrong, this may not stop seen = [] pendant = []+alltasks while pendant: task = pendant.pop(0) if task in seen: continue for x in task.get_run_after(): if not x in seen: pendant.append(task) break else: seen.append(task) self.m_inputs = [x.m_outputs[0] for x in seen] self.order=1 return Task.Task.may_start(self) class ocaml_scanner(Scan.scanner): def __init__(self): Scan.scanner.__init__(self) def may_start(self, task): if getattr(task, 'flag_deps', ''): return 1 # the evil part is that we can only compute the dependencies after the # source files can be read (this means actually producing the source files) if getattr(task, 'bytecode', ''): alltasks = task.obj.bytecode_tasks else: alltasks = task.obj.native_tasks task.signature() # ensure that files are scanned - unfortunately tree = Params.g_build env = task.env() for node in task.m_inputs: lst = tree.m_depends_on[node.variant(env)][node] for depnode in lst: for t in alltasks: if t == task: continue if depnode in t.m_inputs: task.set_run_after(t) task.obj.flag_deps = 'ok' # TODO necessary to get the signature right - for now delattr(task, 'sign_all') task.signature() return 1 def scan(self, task, node): #print "scan is called" code = "".join(filter_comments(node.abspath(task.env()))) global open_re names=[] import_iterator = open_re.finditer(code) 
if import_iterator: for import_match in import_iterator: names.append(import_match.group(1)) found_lst = [] raw_lst = [] for name in names: nd = None for x in task.incpaths: nd = x.find_source(name.lower()+'.ml') if nd: found_lst.append(nd) break else: raw_lst.append(name) return (found_lst, raw_lst) g_caml_scanner = ocaml_scanner() def get_target_name(self, bytecode): if bytecode: if self.islibrary: return self.target+'.cma' else: return self.target+'.run' else: if self.m_type == 'c_object': return self.target+'.o' if self.islibrary: return self.target+'.cmxa' else: return self.target native_lst=['native', 'all', 'c_object'] bytecode_lst=['bytecode', 'all'] class ocaml_taskgen(Object.task_gen): s_default_ext = ['.mli', '.mll', '.mly', '.ml'] def __init__(self, type='all', library=0): Object.task_gen.__init__(self) self.m_type = type self.m_source = '' self.m_target = '' self.islibrary = library self._incpaths_lst = [] self._bld_incpaths_lst = [] self._mlltasks = [] self._mlytasks = [] self.mlitasks = [] self.native_tasks = [] self.bytecode_tasks = [] self.linktasks = [] self.bytecode_env = None self.native_env = None self.compiled_tasks = [] self.includes = '' self.uselib = '' self.out_nodes = [] self.are_deps_set = 0 if not self.env: self.env = Params.g_build.env() if not type in ['bytecode','native','all','c_object']: print 'type for camlobj is undefined '+type type='all' if type in native_lst: self.native_env = self.env.copy() self.native_env['OCAMLCOMP'] = self.native_env['OCAMLOPT'] self.native_env['OCALINK'] = self.native_env['OCAMLOPT'] if type in bytecode_lst: self.bytecode_env = self.env.copy() self.bytecode_env['OCAMLCOMP'] = self.bytecode_env['OCAMLC'] self.bytecode_env['OCALINK'] = self.bytecode_env['OCAMLC'] if self.islibrary: self.bytecode_env['OCALINKFLAGS'] = '-a' self.native_env['OCALINKFLAGS'] = '-a' if self.m_type == 'c_object': self.native_env['OCALINK'] = self.native_env['OCALINK']+' -output-obj' self.features.append('ocaml') 
Object.add_feature('ocaml', ['apply_core']) def apply_incpaths_ml(self): inc_lst = self.includes.split() lst = self._incpaths_lst tree = Params.g_build for dir in inc_lst: node = self.path.find_source(dir) if not node: error("node not found dammit") continue Params.g_build.rescan(node) if not node in lst: lst.append( node ) self._bld_incpaths_lst.append(node) # now the nodes are added to self._incpaths_lst def apply_vars_ml(self): for i in self._incpaths_lst: if self.bytecode_env: self.bytecode_env.append_value('OCAMLPATH', '-I %s' % i.srcpath(self.env)) self.bytecode_env.append_value('OCAMLPATH', '-I %s' % i.bldpath(self.env)) if self.native_env: self.native_env.append_value('OCAMLPATH', '-I %s' % i.bldpath(self.env)) self.native_env.append_value('OCAMLPATH', '-I %s' % i.srcpath(self.env)) varnames = ['INCLUDES', 'OCAMLFLAGS', 'OCALINKFLAGS', 'OCALINKFLAGS_OPT'] for name in self.uselib.split(): for vname in varnames: cnt = self.env[vname+'_'+name] if cnt: if self.bytecode_env: self.bytecode_env.append_value(vname, cnt) if self.native_env: self.native_env.append_value(vname, cnt) def apply_link_ml(self): if self.bytecode_env: linktask = ocaml_link('ocalink', self.bytecode_env) linktask.bytecode = 1 linktask.set_outputs(self.path.find_build(get_target_name(self, bytecode=1))) linktask.obj = self self.linktasks.append(linktask) if self.native_env: linktask = ocaml_link('ocalinkopt', self.native_env) linktask.set_outputs(self.path.find_build(get_target_name(self, bytecode=0))) linktask.obj = self self.linktasks.append(linktask) self.out_nodes += linktask.m_outputs # we produce a .o file to be used by gcc if self.m_type == 'c_object': self.compiled_tasks.append(linktask) def mll_hook(self, node): mll_task = self.create_task('ocamllex', self.native_env) mll_task.set_inputs(node) mll_task.set_outputs(node.change_ext('.ml')) self.mlltasks.append(mll_task) self.allnodes.append(mll_task.m_outputs[0]) def mly_hook(self, node): mly_task = self.create_task('ocamlyacc', 
self.native_env) mly_task.set_inputs(node) mly_task.set_outputs([node.change_ext('.ml'), node.change_ext('.mli')]) self._mlytasks.append(mly_task) self.allnodes.append(mly_task.m_outputs[0]) task = self.create_task('ocamlcmi', self.native_env) task.set_inputs(mly_task.m_outputs[1]) task.set_outputs(mly_task.m_outputs[1].change_ext('.cmi')) def mli_hook(self, node): task = self.create_task('ocamlcmi', self.native_env) task.set_inputs(node) task.set_outputs(node.change_ext('.cmi')) self.mlitasks.append(task) def mlc_hook(self, node): task = self.create_task('ocamlcc', self.native_env) task.set_inputs(node) task.set_outputs(node.change_ext('.o')) self.out_nodes += task.m_outputs def ml_hook(self, node): if self.native_env: task = self.create_task('ocaml', self.native_env) task.set_inputs(node) task.set_outputs(node.change_ext('.cmx')) task.m_scanner = g_caml_scanner task.obj = self task.incpaths = self._bld_incpaths_lst self.native_tasks.append(task) if self.bytecode_env: task = self.create_task('ocaml', self.bytecode_env) task.set_inputs(node) task.m_scanner = g_caml_scanner task.obj = self task.bytecode = 1 task.incpaths = self._bld_incpaths_lst task.set_outputs(node.change_ext('.cmo')) self.bytecode_tasks.append(task) Action.simple_action('ocaml', '${OCAMLCOMP} ${OCAMLPATH} ${OCAMLFLAGS} ${INCLUDES} -c -o ${TGT} ${SRC}', color='GREEN', prio=60) Action.simple_action('ocalink', '${OCALINK} -o ${TGT} ${INCLUDES} ${OCALINKFLAGS} ${SRC}', color='YELLOW', prio=99) Action.simple_action('ocalinkopt', '${OCALINK} -o ${TGT} ${INCLUDES} ${OCALINKFLAGS_OPT} ${SRC}', color='YELLOW', prio=99) Action.simple_action('ocamlcmi', '${OCAMLC} ${OCAMLPATH} ${INCLUDES} -o ${TGT} -c ${SRC}', color='BLUE', prio=40) Action.simple_action('ocamlcc', 'cd ${TGT[0].bld_dir(env)} && ${OCAMLOPT} ${OCAMLFLAGS} ${OCAMLPATH} ${INCLUDES} -c ${SRC[0].abspath(env)}', color='GREEN', prio=60) Action.simple_action('ocamllex', '${OCAMLLEX} ${SRC} -o ${TGT}', color='BLUE', prio=20) 
Action.simple_action('ocamlyacc', '${OCAMLYACC} -b ${TGT[0].bldbase(env)} ${SRC}', color='BLUE', prio=20) def detect(conf): opt = conf.find_program('ocamlopt', var='OCAMLOPT') occ = conf.find_program('ocamlc', var='OCAMLC') if (not opt) or (not occ): fatal('The objective caml compiler was not found:\ninstall it or make it available in your PATH') conf.env['OCAMLC'] = occ conf.env['OCAMLOPT'] = opt conf.env['OCAMLLEX'] = conf.find_program('ocamllex', var='OCAMLLEX') conf.env['OCAMLYACC'] = conf.find_program('ocamlyacc', var='OCAMLYACC') conf.env['OCAMLFLAGS'] = '' conf.env['OCALINK'] = '' conf.env['OCAMLLIB'] = os.popen(conf.env['OCAMLC']+' -where').read().strip()+os.sep conf.env['LIBPATH_OCAML'] = os.popen(conf.env['OCAMLC']+' -where').read().strip()+os.sep conf.env['CPPPATH_OCAML'] = os.popen(conf.env['OCAMLC']+' -where').read().strip()+os.sep conf.env['LIB_OCAML'] = 'camlrun' conf.env['OCALINKFLAGS'] = '' taskgen(apply_incpaths_ml) feature('ocaml')(apply_incpaths_ml) before('apply_vars_ml')(apply_incpaths_ml) taskgen(apply_vars_ml) feature('ocaml')(apply_vars_ml) before('apply_core')(apply_vars_ml) taskgen(apply_link_ml) feature('ocaml')(apply_link_ml) after('apply_core')(apply_link_ml) extension(EXT_MLL)(mll_hook) extension(EXT_MLY)(mly_hook) extension(EXT_MLI)(mli_hook) extension(EXT_MLC)(mlc_hook) extension(EXT_ML)(ml_hook) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/dang.py0000664000175000017500000000070210771525013022242 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! 
/usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2006 (ita) "Demo: '.coin' files are converted into cpp files using 'cat': {.coin -> .cpp -> .o}" import Object Object.declare_chain( name = 'dang', action = '${DANG} ${SRC} > ${TGT}', ext_in = '.coin', ext_out = '.cpp' ) def detect(conf): dang = conf.find_program('cat', var='DANG') if not dang: conf.fatal('cannot find the program "cat"') lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/flex.py0000664000175000017500000000103210771525013022264 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # John O'Meara, 2006 # Thomas Nagy, 2006-2008 "Flex processing" import Object def decide_ext(self, node): if 'cxx' in self.features: return '.lex.cc' else: return '.lex.c' Object.declare_chain( name = 'flex', action = '${FLEX} -o${TGT} ${FLEXFLAGS} ${SRC}', ext_in = '.l', ext_out = decide_ext ) def detect(conf): flex = conf.find_program('flex', var='FLEX') if not flex: conf.fatal("flex was not found") v = conf.env v['FLEXFLAGS'] = '' lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/compiler_cxx.py0000664000175000017500000000342610772251134024034 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! 
/usr/bin/env python # encoding: utf-8 # Matthias Jahn , 2007 (pmarat) import os, sys, imp, types import optparse import Utils, Action, Params, checks, Configure def __list_possible_compiler(plattform): c_compiler = { 'win32': ['msvc', 'g++'], 'cygwin': ['g++'], 'darwin': ['g++'], 'aix5': ['g++'], 'linux': ['g++', 'sunc++'], 'sunos': ['sunc++', 'g++'], 'irix': ['g++'], 'hpux': ['g++'], 'default': ['g++'] } try: return(c_compiler[plattform]) except KeyError: return(c_compiler["default"]) def detect(conf): try: test_for_compiler = Params.g_options.check_cxx_compiler except AttributeError: raise Configure.ConfigurationError("Add set_options(opt): opt.tool_options('compiler_cxx')") for cxx_compiler in test_for_compiler.split(): conf.check_tool(cxx_compiler) if conf.env['CXX']: conf.check_message("%s" %cxx_compiler, '', True) conf.env["COMPILER_CXX"] = "%s" %cxx_compiler #store the choosen c++ compiler return conf.check_message("%s" %cxx_compiler, '', False) conf.env["COMPILER_CXX"] = None def set_options(opt): detected_plattform = checks.detect_platform(None) possible_compiler_list = __list_possible_compiler(detected_plattform) test_for_compiler = str(" ").join(possible_compiler_list) cxx_compiler_opts = opt.add_option_group("C++ Compiler Options") try: cxx_compiler_opts.add_option('--check-cxx-compiler', default="%s" % test_for_compiler, help='On this platform (%s) the following C++ Compiler will be checked by default: "%s"' % (detected_plattform, test_for_compiler), dest="check_cxx_compiler") except optparse.OptionConflictError: pass for cxx_compiler in test_for_compiler.split(): opt.tool_options('%s' % cxx_compiler, option_group=cxx_compiler_opts) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/osx.py0000664000175000017500000000462110771525013022146 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! 
/usr/bin/env python # encoding: utf-8 """MacOSX related tools To compile an executable into a Mac application bundle, set its 'mac_app' attribute to a True value: obj.mac_app = True """ import os, shutil import Object, Action from Object import taskgen, feature, after, before from Params import error, debug, fatal, warning def create_task_macapp(self): if self.m_type == 'program' and self.link_task: apptask = self.create_task('macapp', self.env) apptask.set_inputs(self.link_task.m_outputs) apptask.set_outputs(self.link_task.m_outputs[0].change_ext('.app')) self.m_apptask = apptask def apply_link_osx(self): """Use env['MACAPP'] to force *all* executables to be transformed into Mac applications or use obj.mac_app = True to build specific targets as Mac apps""" if self.env['MACAPP'] or getattr(self, 'mac_app', False): self.create_task_macapp() app_dirs = ['Contents', os.path.join('Contents','MacOS'), os.path.join('Contents','Resources')] app_info = ''' CFBundlePackageType APPL CFBundleGetInfoString Created by Waf CFBundleSignature ???? NOTE THIS IS A GENERATED FILE, DO NOT MODIFY CFBundleExecutable %s ''' def app_build(task): global app_dirs env = task.env() i = 0 for p in task.m_outputs: srcfile = p.srcpath(env) debug("creating directories") try: os.mkdir(srcfile) [os.makedirs(os.path.join(srcfile, d)) for d in app_dirs] except (OSError, IOError): pass # copy the program to the contents dir srcprg = task.m_inputs[i].srcpath(env) dst = os.path.join(srcfile, 'Contents', 'MacOS') debug("copy %s to %s" % (srcprg, dst)) shutil.copy(srcprg, dst) # create info.plist debug("generate Info.plist") # TODO: Support custom info.plist contents. 
f = file(os.path.join(srcfile, "Contents", "Info.plist"), "w") f.write(app_info % os.path.basename(srcprg)) f.close() i += 1 return 0 x = Action.Action('macapp', vars=[], func=app_build) x.prio = 300 taskgen(create_task_macapp) taskgen(apply_link_osx) after('apply_link')(apply_link_osx) feature('cc')(apply_link_osx) feature('cxx')(apply_link_osx) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/cxx.py0000664000175000017500000000756410772264157022162 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 import sys if sys.hexversion < 0x020400f0: from sets import Set as set #! /usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2005 (ita) "Base for c++ programs and libraries" import sys import Object, Params, Action, Utils from Params import debug, fatal import ccroot # <- do not remove from Object import taskgen, before, extension g_cpp_flag_vars = [ 'FRAMEWORK', 'FRAMEWORKPATH', 'STATICLIB', 'LIB', 'LIBPATH', 'LINKFLAGS', 'RPATH', 'INCLUDE', 'CXXFLAGS', 'CCFLAGS', 'CPPPATH', 'CPPFLAGS', 'CXXDEFINES'] "main cpp variables" EXT_CXX = ['.cpp', '.cc', '.cxx', '.C'] CXX_METHS = ['init_cxx', 'apply_type_vars', 'apply_incpaths', 'apply_dependencies', 'apply_defines_cxx', 'apply_core', 'apply_lib_vars', 'apply_obj_vars_cxx'] Object.add_feature('cxx', CXX_METHS) # TODO get rid of that class g_cpp_type_vars=['CXXFLAGS', 'LINKFLAGS'] class cpp_taskgen(ccroot.ccroot_abstract): def __init__(self, type='program', subtype=None): ccroot.ccroot_abstract.__init__(self, type, subtype) self.m_type_initials = 'cpp' self.cxxflags='' self.cppflags='' self.features.append('cxx') def init_cxx(self): self.mappings['.c'] = Object.task_gen.mappings['.cxx'] if hasattr(self, 'p_flag_vars'): self.p_flag_vars = set(self.p_flag_vars).union(g_cpp_flag_vars) else: self.p_flag_vars = g_cpp_flag_vars if hasattr(self, 'p_type_vars'): self.p_type_vars = set(self.p_type_vars).union(g_cpp_type_vars) else: self.p_type_vars = g_cpp_type_vars def apply_obj_vars_cxx(self): 
debug('apply_obj_vars_cxx', 'ccroot') env = self.env app = self.env.append_unique cpppath_st = self.env['CPPPATH_ST'] self.addflags('CXXFLAGS', self.cxxflags) # local flags come first # set the user-defined includes paths for i in self.bld_incpaths_lst: app('_CXXINCFLAGS', cpppath_st % i.bldpath(env)) app('_CXXINCFLAGS', cpppath_st % i.srcpath(env)) # set the library include paths for i in self.env['CPPPATH']: app('_CXXINCFLAGS', cpppath_st % i) #print self.env['_CXXINCFLAGS'] #print " appending include ",i # this is usually a good idea app('_CXXINCFLAGS', cpppath_st % '.') app('_CXXINCFLAGS', cpppath_st % self.env.variant()) tmpnode = Params.g_build.m_curdirnode app('_CXXINCFLAGS', cpppath_st % tmpnode.bldpath(env)) app('_CXXINCFLAGS', cpppath_st % tmpnode.srcpath(env)) def apply_defines_cxx(self): tree = Params.g_build lst = self.to_list(self.defines)+self.to_list(self.env['CXXDEFINES']) milst = [] # now process the local defines for defi in lst: if not defi in milst: milst.append(defi) # CXXDEFINES_USELIB libs = self.to_list(self.uselib) for l in libs: val = self.env['CXXDEFINES_'+l] if val: milst += self.to_list(val) self.env['DEFLINES'] = ["%s %s" % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in milst]] y = self.env['CXXDEFINES_ST'] self.env['_CXXDEFFLAGS'] = [y%x for x in milst] def cxx_hook(self, node): # create the compilation task: cpp or cc task = self.create_task('cpp', self.env) try: obj_ext = self.obj_ext except AttributeError: obj_ext = '_%s.o' % self.m_type[:2] task.m_scanner = ccroot.g_c_scanner task.path_lst = self.inc_paths task.defines = self.scanner_defines task.m_inputs = [node] task.m_outputs = [node.change_ext(obj_ext)] self.compiled_tasks.append(task) cpp_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${_CXXINCFLAGS} ${_CXXDEFFLAGS} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT}' link_str = '${LINK_CXX} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT} ${LINKFLAGS} ${_LIBDIRFLAGS} ${_LIBFLAGS}' Action.simple_action('cpp', cpp_str, 
color='GREEN', prio=100) Action.simple_action('cpp_link', link_str, color='YELLOW', prio=111) Object.declare_order('apply_dependencies', 'apply_defines_cxx', 'apply_core', 'apply_lib_vars', 'apply_obj_vars_cxx', 'apply_obj_vars') taskgen(init_cxx) before('apply_type_vars')(init_cxx) taskgen(apply_obj_vars_cxx) taskgen(apply_defines_cxx) extension(EXT_CXX)(cxx_hook) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/sconpat.py0000664000175000017500000000316110771525013023002 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2006-2008 (ita) try: from hashlib import md5 except ImportError: from md5 import md5 import Utils, Configure, Action, Task, Params from Params import error, fatal class sconpat_error(Exception): pass class Builder_class(object): def __init__(self): self.action = None self.generator = None def init(self, **kw): if kw.has_key('generator') and kw.has_key('action'): raise sconpat_error, 'do not mix action and generator in a builder' if kw.has_key('action'): a = kw['action'].replace('$SOURCES', '${SRC}') a = a.replace('$TARGETS', '${TGT}') a = a.replace('$TARGET', '${TGT[0].abspath(env)}') a = a.replace('$SOURCE', '${SRC[0].abspath(env)}') m = md5() m.update(a) key = m.hexdigest() Action.simple_action(key, a, kw.get('color', 'GREEN')) self.action=key def apply(self, target, source, **kw): #print "Builder_class apply called" #print kw['env'] #print target #print source curdir = Params.g_build.m_curdirnode t = Task.Task(self.action, kw['env'], 10) t.set_inputs(curdir.find_source(source, create=1)) t.set_outputs(curdir.find_build(target, create=1)) def Builder(**kw): ret = Builder_class() ret.init(**kw) return ret def Environment(**kw): import Environment ret = Environment.Environment() if kw.has_key('BUILDERS'): bd = kw['BUILDERS'] for k in bd: # store the builder name on the builder bd[k].name = k def miapply(self, *lst, **kw): if not 'env' in kw: kw['env']=ret bd[k].apply(*lst, 
**kw) ret.__class__.__dict__[k]=miapply return ret lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/checks.py0000664000175000017500000001577410771525013022610 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2006 (ita) "Additional configuration checks hooked on the configuration class" import Utils, Configure, config_c from Params import error, fatal endian_str = ''' #include int is_big_endian() { long one = 1; return !(*((char *)(&one))); } int main() { if (is_big_endian()) printf("bigendian=1\\n"); else printf("bigendian=0\\n"); return 0; } ''' class compile_configurator(config_c.configurator_base): "inheritance demo" def __init__(self, conf): config_c.configurator_base.__init__(self, conf) self.name = '' self.code = '' self.flags = '' self.define = '' self.uselib = '' self.want_message = 0 self.msg = '' self.force_compiler = None def error(self): fatal('test program would not run') def run_cache(self, retval): if self.want_message: self.conf.check_message('compile code (cached)', '', not (retval is False), option=self.msg) def validate(self): if not self.code: fatal('test configurator needs code to compile and run!') def run_test(self): obj = config_c.check_data() obj.code = self.code obj.env = self.env obj.uselib = self.uselib obj.flags = self.flags if self.force_compiler: obj.force_compiler = self.force_compiler ret = self.conf.run_check(obj) if self.want_message: self.conf.check_message('compile code', '', not (ret is False), option=self.msg) return ret def create_compile_configurator(self): return compile_configurator(self) def checkEndian(self, define='', pathlst=[]): if define == '': define = 'IS_BIGENDIAN' if self.is_defined(define): return self.get_define(define) global endian test = self.create_test_configurator() test.code = endian_str code = test.run()['result'] t = Utils.to_hashtable(code) try: is_big = int(t['bigendian']) except KeyError: raise 
def detect_platform(self):
	"""Return a short platform identifier string (adapted from scons)."""
	import os, sys
	if os.name == 'posix':
		if sys.platform == 'cygwin':
			return 'cygwin'
		# first substring match wins; order mirrors the original checks
		for needle, label in (('linux', 'linux'), ('irix', 'irix'),
				('sunos', 'sunos'), ('hp-ux', 'hpux'),
				('aix', 'aix'), ('darwin', 'darwin')):
			if needle in sys.platform:
				return label
		return 'posix'
	if os.name == 'os2':
		return 'os2'
	if os.name == 'java':
		return 'java'
	return sys.platform
def detect(conf):
	"attach the checks to the conf object"
	# the point of checkEndian is to make an example, the following is better
	# if sys.byteorder == "little":
	for method in (
			find_header, check_header, create_compile_configurator,
			try_build, try_build_and_exec, check_flags,
			# additional convenience wrappers
			check_header2, check_library2, check_pkg2, check_cfg2,
			checkEndian, checkFeatures, detect_platform):
		conf.hook(method)
def read_la_file(path):
	"""Parse a libtool .la file into a dict.

	Each line of the form  key='value'  contributes one entry; lines that
	do not match (comments, blanks, malformed lines) are silently skipped.

	path -- filesystem path of an existing .la file
	Returns a dict mapping key strings to their unquoted string values.
	"""
	sp = re.compile(r'^([^=]+)=\'(.*)\'$')
	dc = {}
	# do not shadow the builtin 'file'; close the handle even on error
	la_file = open(path, "r")
	try:
		for line in la_file:
			try:
				# a matching line splits into ['', key, value, '']
				_, left, right, _ = sp.split(line.strip())
				dc[left] = right
			except ValueError:
				# non-matching line: split() returned a single element
				pass
	finally:
		la_file.close()
	return dc
class libtool_la_file:
	"""In-memory representation of a single libtool .la file."""

	def __init__(self, la_filename):
		"""Parse la_filename; raise IOError when the file does not exist."""
		self.__la_filename = la_filename
		# remove path and .la suffix
		self.linkname = str(os.path.split(la_filename)[-1])[:-3]
		if self.linkname.startswith("lib"):
			self.linkname = self.linkname[3:]
		# The name that we can dlopen(3).
		self.dlname = None
		# Names of this library
		self.library_names = None
		# The name of the static archive.
		self.old_library = None
		# Libraries that this one depends upon.
		self.dependency_libs = None
		# Version information (current.age.revision)
		self.current = None
		self.age = None
		self.revision = None
		# Is this an already installed library?
		self.installed = None
		# Should we warn about portability when linking against -modules?
		self.shouldnotlink = None
		# Files to dlopen/dlpreopen
		self.dlopen = None
		self.dlpreopen = None
		# Directory that this library needs to be installed in:
		self.libdir = '/usr/lib'
		if not self.__parse():
			# was: raise "file ... not found!!" -- raising a string is invalid;
			# raise a real exception instead
			raise IOError("file %s not found!!" % (la_filename,))

	def __parse(self):
		"Retrieve the variables from a file"
		if not os.path.isfile(self.__la_filename):
			return 0
		la_file = open(self.__la_filename, 'r')
		try:
			for line in la_file:
				ln = line.strip()
				if not ln:
					continue
				if ln[0] == '#':
					continue
				(key, value) = str(ln).split('=', 1)
				key = key.strip()
				value = value.strip()
				# map libtool booleans, numbers and quoted strings to python values
				if value == "no":
					value = False
				elif value == "yes":
					value = True
				else:
					try:
						value = int(value)
					except ValueError:
						value = value.strip("'")
				setattr(self, key, value)
		finally:
			# close even if a malformed line raises
			la_file.close()
		return 1

	def get_libs(self):
		"""return linkflags for this lib"""
		libs = []
		if self.dependency_libs:
			libs = str(self.dependency_libs).strip().split()
		# add la lib and libdir
		libs.insert(0, "-l%s" % self.linkname.strip())
		libs.insert(0, "-L%s" % self.libdir.strip())
		return libs

	def __str__(self):
		return '''\
dlname = "%(dlname)s"
library_names = "%(library_names)s"
old_library = "%(old_library)s"
dependency_libs = "%(dependency_libs)s"
version = %(current)s.%(age)s.%(revision)s
installed = "%(installed)s"
shouldnotlink = "%(shouldnotlink)s"
dlopen = "%(dlopen)s"
dlpreopen = "%(dlpreopen)s"
libdir = "%(libdir)s"''' % self.__dict__
"""return the complete uniqe linkflags that do not contain .la files anymore""" libs_list = list(self.__libtool_la_file.get_libs()) libs_map = {} while len(libs_list) > 0: entry = libs_list.pop(0) if entry: if str(entry).endswith(".la"): ## prevents duplicate .la checks if entry not in self.__sub_la_files: self.__sub_la_files.append(entry) libs_list.extend(self.__get_la_libs(entry)) else: libs_map[entry]=1 self.__libs = libs_map.keys() return self.__libs def get_libs_only_L(self): if not self.__libs: self.get_libs() libs = self.__libs libs = filter(lambda s: str(s).startswith('-L'), libs) return libs def get_libs_only_l(self): if not self.__libs: self.get_libs() libs = self.__libs libs = filter(lambda s: str(s).startswith('-l'), libs) return libs def get_libs_only_other(self): if not self.__libs: self.get_libs() libs = self.__libs libs = filter(lambda s: not (str(s).startswith('-L') or str(s).startswith('-l')), libs) return libs def useCmdLine(): """parse cmdline args and control build""" usage = '''Usage: %prog [options] PathToFile.la example: %prog --atleast-version=2.0.0 /usr/lib/libIlmImf.la nor: %prog --libs /usr/lib/libamarok.la''' parser = optparse.OptionParser(usage) a = parser.add_option a("--version", dest = "versionNumber", action = "store_true", default = False, help = "output version of libtool-config" ) a("--debug", dest = "debug", action = "store_true", default = False, help = "enable debug" ) a("--libs", dest = "libs", action = "store_true", default = False, help = "output all linker flags" ) a("--libs-only-l", dest = "libs_only_l", action = "store_true", default = False, help = "output -l flags" ) a("--libs-only-L", dest = "libs_only_L", action = "store_true", default = False, help = "output -L flags" ) a("--libs-only-other", dest = "libs_only_other", action = "store_true", default = False, help = "output other libs (e.g. 
-pthread)" ) a("--atleast-version", dest = "atleast_version", default=None, help = "return 0 if the module is at least version ATLEAST_VERSION" ) a("--exact-version", dest = "exact_version", default=None, help = "return 0 if the module is exactly version EXACT_VERSION" ) a("--max-version", dest = "max_version", default=None, help = "return 0 if the module is at no newer than version MAX_VERSION" ) (options, args) = parser.parse_args() if len(args) != 1 and not options.versionNumber: parser.error("incorrect number of arguments") if options.versionNumber: print "libtool-config version %s" % REVISION return 0 ltf = libtool_config(args[0]) if options.debug: print(ltf) if options.atleast_version: if ltf >= options.atleast_version: return 0 sys.exit(1) if options.exact_version: if ltf == options.exact_version: return 0 sys.exit(1) if options.max_version: if ltf <= options.max_version: return 0 sys.exit(1) def p(x): print " ".join(x) if options.libs: p(ltf.get_libs()) elif options.libs_only_l: p(ltf.get_libs_only_l()) elif options.libs_only_L: p(ltf.get_libs_only_L()) elif options.libs_only_other: p(ltf.get_libs_only_other()) return 0 if __name__ == '__main__': useCmdLine() taskgen(apply_link_libtool) after('apply_link')(apply_link_libtool) taskgen(apply_libtool) before('apply_core')(apply_libtool) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/swig.py0000664000175000017500000000757110772264157022327 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: UTF-8 # Petar Forai # Thomas Nagy print """ WARNING: You are using the swig tool! This tool is marked as beeing deprecated! use with caution! 
class swig_class_scanner(Scan.scanner):
	"""Scanner extracting the %module name and local dependencies of a swig file.

	scan() returns (dependency_nodes, [module_name]) based on three regexes:
	re_1 (%module), re_2 (%include "...") and re_3 (#include "...").
	"""

	def __init__(self):
		Scan.scanner.__init__(self)

	def scan(self, task, node):
		env = task.m_env
		lst_names = []
		lst_src = []
		# read the file; guarantee the handle is closed even if read() fails
		# (removed the unused 'variant'/'tree' locals of the original)
		fi = open(node.abspath(env), 'r')
		try:
			content = fi.read()
		finally:
			fi.close()
		# module name, only for the .swig file
		names = re_1.findall(content)
		if names:
			lst_names.append(names[0])
		# find .i files (and perhaps .h files), then project headers
		for pattern in (re_2, re_3):
			for n in pattern.findall(content):
				u = node.m_parent.find_source(n)
				if u:
					lst_src.append(u)
		# list of nodes this one depends on, and module name if present
		return (lst_src, lst_names)
def check_swig_version(conf, minver=None):
	"""Check for a minimum swig version like conf.check_swig_version("1.3.28")
	or conf.check_swig_version((1,3,28))

	conf   -- configuration context (reads conf.env['SWIG'])
	minver -- minimum version as "X.Y.Z" string or (X, Y, Z) tuple; None
	          accepts any version
	Returns True/False; stores conf.env['SWIG_VERSION'] on success.
	"""
	import pproc as subprocess
	reg_swig = re.compile(r'SWIG Version\s(.*)', re.M)
	proc = subprocess.Popen([conf.env['SWIG'], "-version"],
		stdout=subprocess.PIPE, stderr=subprocess.PIPE)
	swig_out = proc.communicate()[0]
	swigver = [int(s) for s in reg_swig.findall(swig_out)[0].split(".")]
	# normalize minver to a list of ints so it compares against swigver
	if isinstance(minver, basestring):
		minver = [int(s) for s in minver.split(".")]
	if isinstance(minver, tuple):
		minver = [int(s) for s in minver]
	result = (minver is None) or (minver[:3] <= swigver[:3])
	swigver_full = '.'.join(map(str, swigver))
	if result:
		conf.env['SWIG_VERSION'] = swigver_full
	if minver is None:
		conf.check_message_custom('swig version', '', swigver_full)
	else:
		# BUGFIX: minver_str was computed before the None check, so calling
		# with minver=None raised TypeError (map over None); build it only here
		minver_str = '.'.join(map(str, minver))
		conf.check_message('swig version', ">= %s" % (minver_str,), result, option=swigver_full)
	return result
class msgfmt_taskgen(Object.task_gen):
	"""Task generator compiling .po translation catalogs into .mo files."""

	def __init__(self, appname='set_your_app_name'):
		Object.task_gen.__init__(self)
		self.langs = ''  # for example "foo/fr foo/br"
		self.chmod = 420  # octal 0644
		self.inst_var = 'KDE4_LOCALE_INSTALL_DIR'
		self.appname = appname

	def apply(self):
		# one msgfmt task per language listed in self.langs
		for lang in self.to_list(self.langs):
			po_node = self.path.find_source_lst(Utils.split_path(lang + '.po'))
			mo_task = self.create_task('msgfmt', self.env)
			mo_task.set_inputs(po_node)
			mo_task.set_outputs(po_node.change_ext('.mo'))
			# NOTE(review): 'Params' is not in this module's import line --
			# this looks like it would raise NameError during install; confirm
			if not Params.g_install:
				continue
			langname = lang.split('/')[-1]
			inst_dir = langname + os.sep + 'LC_MESSAGES'
			mo_task.install = {
				'var': self.inst_var,
				'dir': inst_dir + '/',
				'as': self.appname + '.mo',
				'chmod': self.chmod,
			}
def detect(conf):
	"""Locate a resource compiler matching the configured C/C++ toolchain."""
	env = conf.env
	cc_name = os.path.basename(''.join(env['CC']).lower())
	cxx_name = os.path.basename(''.join(env['CXX']).lower())
	# TODO ugly
	gnu_toolchain = ('gcc' in cc_name or 'cc' in cc_name
		or 'g++' in cxx_name or 'c++' in cxx_name)
	if gnu_toolchain:
		# gcc toolchain -> windres
		winrc = conf.find_program('windres', var='WINRC')
		env['WINRC_TGT_F'] = '-o '
		env['WINRC_SRC_F'] = '-i '
	elif 'cl.exe' in cc_name or 'cl.exe' in cxx_name:
		# msvc -> rc.exe
		winrc = conf.find_program('RC', var='WINRC')
		env['WINRC_TGT_F'] = '/fo '
		env['WINRC_SRC_F'] = ' '
	else:
		return 0
	if not winrc:
		conf.fatal('winrc was not found!!')
	else:
		env['WINRC'] = quote_whitespace(winrc)
	env['WINRCFLAGS'] = ''
class MTask(Task.Task):
	"A cpp task that may create a moc task dynamically"
	def __init__(self, action_name, env, parent, priority=10):
		# parent is the task generator that owns this task; moc_done flags
		# whether the dynamic moc tasks have already been created
		Task.Task.__init__(self, action_name, env, priority)
		self.moc_done = 0
		self.parent = parent
	def may_start(self):
		# First call: scan the raw dependencies for *.moc files, create one
		# 'moc' task per matching header, and register them in m_run_after.
		# Later calls: wait until every moc task has run, then recompute the
		# signature and defer to the base-class scheduling decision.
		if self.moc_done:
			# if there is a moc task, delay the computation of the file signature
			for t in self.get_run_after():
				if not t.m_hasrun:
					return 0
			# we need to recompute the signature as the moc task has finally run
			# unfortunately, the moc file enters in the dependency calculation TODO
			delattr(self, 'sign_all')
			self.signature()
			return Task.Task.may_start(self)
		tree = Params.g_build
		parn = self.parent
		node = self.m_inputs[0]
		# to know if there is a moc file to create
		self.signature()
		moctasks=[]
		mocfiles=[]
		variant = node.variant(parn.env)
		try:
			# consume (and clear) the raw deps recorded by the scanner
			tmp_lst = tree.m_raw_deps[variant][node]
			tree.m_raw_deps[variant][node] = []
		except KeyError:
			tmp_lst = []
		for d in tmp_lst:
			if not d.endswith('.moc'): continue
			# paranoid check
			if d in mocfiles:
				error("paranoia owns")
				continue
			# process that base.moc only once
			mocfiles.append(d)
			# find the extension - this search is done only once
			ext = ''
			try: ext = Params.g_options.qt_header_ext
			except AttributeError: pass
			if not ext:
				base2 = d[:-4]
				path = node.m_parent.srcpath(parn.env)
				for i in MOC_H:
					try:
						# TODO we could use find_source
						os.stat(os.path.join(path, base2+i))
					except OSError:
						pass
					else:
						ext = i
						break
				if not ext: fatal("no header found for %s which is a moc file" % str(d))
			# next time we will not search for the extension (look at the 'for' loop below)
			# NOTE(review): uses base2+i here, but base2/i are only bound inside
			# the 'if not ext' branch above -- when qt_header_ext supplied the
			# extension this looks like a NameError; confirm upstream intent
			h_node = node.m_parent.find_source(base2+i)
			m_node = h_node.change_ext('.moc')
			tree.m_depends_on[variant][m_node] = h_node
			# create the task
			task = Task.Task('moc', parn.env, normal=0)
			task.set_inputs(h_node)
			task.set_outputs(m_node)
			generator = Params.g_build.generator
			generator.outstanding.insert(0, task)
			generator.total += 1
			moctasks.append(task)
		# remove raw deps except the moc files to save space (optimization)
		tmp_lst = tree.m_raw_deps[variant][node] = mocfiles
		# look at the file inputs, it is set right above
		try:
			lst = tree.m_depends_on[variant][node]
		except KeyError:
			lst=[]
		for d in lst:
			name = d.m_name
			if name.endswith('.moc'):
				# a .moc file appears directly among the node deps: moc the
				# header it maps to (only the first such entry is handled)
				task = Task.Task('moc', parn.env, normal=0)
				task.set_inputs(tree.m_depends_on[variant][d])
				task.set_outputs(d)
				generator = Params.g_build.generator
				generator.outstanding.insert(0, task)
				generator.total += 1
				moctasks.append(task)
				break
		# simple scheduler dependency: run the moc task before others
		self.m_run_after = moctasks
		self.moc_done = 1
		return 0
def apply_qt4(self):
	"""Create the translation task chain (.ts -> .qm -> .qrc) and compute MOC_FLAGS."""
	if self.lang:
		qm_nodes = []
		ts_nodes = []
		for entry in self.to_list(self.lang):
			qm_task = Task.Task('ts2qm', self.env, 4)
			qm_task.set_inputs(self.path.find_build(entry + '.ts'))
			qm_task.set_outputs(qm_task.m_inputs[0].change_ext('.qm'))
			qm_nodes.append(qm_task.m_outputs[0])
			if self.update:
				ts_nodes.append(qm_task.m_inputs[0])
		if self.update and Params.g_options.trans_qt4:
			# we need the cpp files given, except the rcc task we create after
			upd_task = Task.TaskCmd(translation_update, self.env, 2)
			upd_task.m_inputs = [tsk.m_inputs[0] for tsk in self.compiled_tasks]
			upd_task.m_outputs = ts_nodes
		if self.langname:
			rc_task = Task.Task('qm2rcc', self.env, 40)
			rc_task.set_inputs(qm_nodes)
			rc_task.set_outputs(self.path.find_build(self.langname + '.qrc'))
			rc_task.path = self.path
			cpp_task = create_rcc_task(self, rc_task.m_outputs[0])
			self.link_task.m_inputs.append(cpp_task.m_outputs[0])
	# moc only needs the -D and -I flags from CXXFLAGS
	self.env['MOC_FLAGS'] = [flag for flag in self.to_list(self.env['CXXFLAGS'])
		if len(flag) >= 2 and flag[:2] in ('-D', '-I')]
def cxx_hook(self, node):
	"""Create the compilation task (an MTask) for one c++ source node."""
	# create the compilation task: cpp or cc
	comp = MTask('cpp', self.env, self)
	self.m_tasks.append(comp)
	try:
		suffix = self.obj_ext
	except AttributeError:
		suffix = '_%s.o' % self.m_type[:2]
	comp.m_scanner = ccroot.g_c_scanner
	comp.path_lst = self.inc_paths
	comp.defines = self.scanner_defines
	comp.m_inputs = [node]
	comp.m_outputs = [node.change_ext(suffix)]
	self.compiled_tasks.append(comp)
['qmake-qt4', 'qmake4', 'qmake']: qmake = conf.find_program(qmk, path) if qmake: version = os.popen(qmake+" -query QT_VERSION").read().strip().split('.') if version[0] == "4": qtincludes = os.popen(qmake+" -query QT_INSTALL_HEADERS").read().strip() qtdir = os.popen(qmake + " -query QT_INSTALL_PREFIX").read().strip()+"/" qtbin = os.popen(qmake + " -query QT_INSTALL_BINS").read().strip()+"/" break except OSError: pass # check for the qt includes first if not qtincludes: qtincludes = qtdir + 'include/' env['QTINCLUDEPATH']=qtincludes lst = [qtincludes, '/usr/share/qt4/include/', '/opt/qt4/include'] test = conf.create_header_enumerator() test.name = 'QtGui/QFont' test.path = lst test.mandatory = 1 ret = test.run() # check for the qtbinaries if not qtbin: qtbin = qtdir + 'bin/' binpath = [qtbin, '/usr/share/qt4/bin/'] + os.environ['PATH'].split(':') def find_bin(lst, var): for f in lst: ret = conf.find_program(f, path_list=binpath) if ret: env[var]=ret break find_bin(['uic-qt3', 'uic3'], 'QT_UIC3') find_bin(['uic-qt4', 'uic'], 'QT_UIC') version = os.popen(env['QT_UIC'] + " -version 2>&1").read().strip() version = version.replace('Qt User Interface Compiler ','') version = version.replace('User Interface Compiler for Qt', '') if version.find(" 3.") != -1: conf.check_message('uic version', '(too old)', 0, option='(%s)'%version) sys.exit(1) conf.check_message('uic version', '', 1, option='(%s)'%version) find_bin(['moc-qt4', 'moc'], 'QT_MOC') find_bin(['rcc'], 'QT_RCC') find_bin(['lrelease-qt4', 'lrelease'], 'QT_LRELEASE') find_bin(['lupdate-qt4', 'lupdate'], 'QT_LUPDATE') env['UIC3_ST']= '%s -o %s' env['UIC_ST'] = '%s -o %s' env['MOC_ST'] = '-o' # check for the qt libraries if not qtlibs: qtlibs = qtdir + 'lib' vars = "Qt3Support QtCore QtGui QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtWebKit".split() framework_ok = False if sys.platform == "darwin" and useframework: for i in vars: e = conf.create_framework_configurator() e.path = [qtlibs] e.name = i e.remove_dot_h = 1 
e.run() if not i == 'QtCore': # strip -F flag so it don't get reduant for r in env['CCFLAGS_' + i.upper()]: if r.startswith('-F'): env['CCFLAGS_' + i.upper()].remove(r) break incflag = '-I%s' % os.path.join(qtincludes, i) if not incflag in env["CCFLAGS_" + i.upper ()]: env['CCFLAGS_' + i.upper ()] += [incflag] if not incflag in env["CXXFLAGS_" + i.upper ()]: env['CXXFLAGS_' + i.upper ()] += [incflag] # now we add some static depends. if conf.is_defined("HAVE_QTOPENGL"): if not '-framework OpenGL' in env["LINKFLAGS_QTOPENGL"]: env["LINKFLAGS_QTOPENGL"] += ['-framework OpenGL'] if conf.is_defined("HAVE_QTGUI"): if not '-framework AppKit' in env["LINKFLAGS_QTGUI"]: env["LINKFLAGS_QTGUI"] += ['-framework AppKit'] if not '-framework ApplicationServices' in env["LINKFLAGS_QTGUI"]: env["LINKFLAGS_QTGUI"] += ['-framework ApplicationServices'] framework_ok = True if not framework_ok: # framework_ok is false either when the platform isn't OSX, Qt4 shall not be used as framework, or Qt4 could not be found as framework vars_debug = [a+'_debug' for a in vars] for i in vars_debug+vars: #conf.check_pkg(i, pkgpath=qtlibs) pkgconf = conf.create_pkgconfig_configurator() pkgconf.name = i pkgconf.pkgpath = '%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib' % (qtlibs, qtlibs) pkgconf.run() # the libpaths are set nicely, unfortunately they make really long command-lines # remove the qtcore ones from qtgui, etc def process_lib(vars_, coreval): for d in vars_: var = d.upper() if var == 'QTCORE': continue value = env['LIBPATH_'+var] if value: core = env[coreval] accu = [] for lib in value: if lib in core: continue accu.append(lib) env['LIBPATH_'+var] = accu process_lib(vars, 'LIBPATH_QTCORE') process_lib(vars_debug, 'LIBPATH_QTCORE_DEBUG') # rpath if wanted if Params.g_options.want_rpath: def process_rpath(vars_, coreval): for d in vars_: var = d.upper() value = env['LIBPATH_'+var] if value: core = env[coreval] accu = [] for lib in value: if 
var != 'QTCORE': if lib in core: continue accu.append('-Wl,--rpath='+lib) env['RPATH_'+var] = accu process_rpath(vars, 'LIBPATH_QTCORE') process_rpath(vars_debug, 'LIBPATH_QTCORE_DEBUG') env['QTLOCALE'] = str(env['PREFIX'])+'/share/locale' def detect(conf): if sys.platform=='win32': fatal('Qt4.py will not work on win32 for now - ask the author') detect_qt4(conf) def set_options(opt): try: opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]') except Exception: pass opt.add_option('--header-ext', type='string', default='', help='header extension for moc files', dest='qt_header_ext') for i in "qtdir qtincludes qtlibs qtbin".split(): opt.add_option('--'+i, type='string', default='', dest=i) if sys.platform == "darwin": opt.add_option('--no-qt4-framework', action="store_false", help='do not use the framework version of Qt4 in OS X', dest='use_qt4_osxframework',default=True) opt.add_option('--translate', action="store_true", help="collect translation strings", dest="trans_qt4", default=False) extension(EXT_RCC)(create_rcc_task) extension(EXT_UI)(create_uic_task) taskgen(apply_qt4) feature('qt4')(apply_qt4) after('apply_link')(apply_qt4) extension(EXT_QT4)(cxx_hook) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/msvc.py0000664000175000017500000003421510771537440022316 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! 
/usr/bin/env python # encoding: utf-8 # Carlos Rafael Giani, 2006 (dv) # Tamas Pal, 2007 (folti) # Visual C support - beta, needs more testing import os, sys, re, string, optparse import Utils, Action, Params, Object, Runner, Configure from Params import debug, error, fatal, warning from Utils import quote_whitespace from Object import taskgen, after, before, feature import ccroot from libtool import read_la_file from os.path import exists def msvc_linker(task): """Special linker for MSVC with support for embedding manifests into DLL's and executables compiled by Visual Studio 2005 or probably later. Without the manifest file, the binaries are unusable. See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx Problems with this tool: it is always called whether MSVC creates manifests or not.""" e = task.env() linker = e['LINK'] srcf = e['LINK_SRC_F'] trgtf = e['LINK_TGT_F'] linkflags = e.get_flat('LINKFLAGS') libdirs = e.get_flat('_LIBDIRFLAGS') libs = e.get_flat('_LIBFLAGS') subsystem='' if task.m_subsystem: subsystem='/subsystem:%s' % task.m_subsystem outfile=task.m_outputs[0].bldpath(e) manifest=outfile+'.manifest' # pdb file containing the debug symbols (if compiled with /Zi or /ZI and linked with /debug pdbnode=task.m_outputs[0].change_ext('.pdb') pdbfile=pdbnode.bldpath(e) objs=" ".join(['"%s"' % a.abspath(e) for a in task.m_inputs]) cmd="%s %s %s%s %s%s %s %s %s" % (linker,subsystem,srcf,objs,trgtf,outfile, linkflags, libdirs,libs) ret=Runner.exec_command(cmd) if ret: return ret # check for the pdb file. if exists, add to the list of outputs if os.path.exists(pdbfile): task.m_outputs.append(pdbnode) if os.path.exists(manifest): debug('manifesttool', 'msvc') mtool = e['MT'] if not mtool: return 0 mode='' # embedding mode. Different for EXE's and DLL's. 
# see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx if task.m_type == 'program': mode='1' elif task.m_type == 'shlib': mode='2' debug('embedding manifest','msvcobj') flags = e['MTFLAGS'] if flags: flags=string.join(flags,' ') else: flags='' cmd='%s %s -manifest "%s" -outputresource:"%s";#%s' % (mtool, flags, manifest, outfile, mode) ret=Runner.exec_command(cmd) return ret # importlibs provided by MSVC/Platform SDK. Do NOT search them.... g_msvc_systemlibs = """ aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32 osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32 shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32 traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp """.split() def 
find_lt_names_msvc(self, libname, is_static=False): """ Win32/MSVC specific code to glean out information from libtool la files. this function is not attached to the task_gen class """ lt_names=[ 'lib%s.la' % libname, '%s.la' % libname, ] for path in self.libpaths: for la in lt_names: laf=os.path.join(path,la) dll=None if exists(laf): ltdict=read_la_file(laf) lt_libdir=None if ltdict.has_key('libdir') and ltdict['libdir'] != '': lt_libdir=ltdict['libdir'] if not is_static and ltdict.has_key('library_names') and ltdict['library_names'] != '': dllnames=ltdict['library_names'].split() dll=dllnames[0].lower() dll=re.sub('\.dll$', '', dll) return (lt_libdir, dll, False) elif ltdict.has_key('old_library') and ltdict['old_library'] != '': olib=ltdict['old_library'] if exists(os.path.join(path,olib)): return (path, olib, True) elif lt_libdir != '' and exists(os.path.join(lt_libdir,olib)): return (lt_libdir, olib, True) else: return (None, olib, True) else: fatal('invalid libtool object file: %s' % laf) return (None, None, None) def libname_msvc(self, libname, is_static=False): lib=libname.lower() lib=re.sub('\.lib$','',lib) if lib in g_msvc_systemlibs: return lib+'.lib' lib=re.sub('^lib','',lib) if lib == 'm': return None (lt_path, lt_libname, lt_static) = find_lt_names_msvc(self, lib, is_static) if lt_path != None and lt_libname != None: if lt_static == True: # file existance check has been made by find_lt_names return os.path.join(lt_path,lt_libname) if lt_path != None: _libpaths=[lt_path] + self.libpaths else: _libpaths=self.libpaths static_libs=[ '%ss.lib' % lib, 'lib%ss.lib' % lib, '%s.lib' %lib, 'lib%s.lib' % lib, ] dynamic_libs=[ 'lib%s.dll.lib' % lib, 'lib%s.dll.a' % lib, '%s.dll.lib' % lib, '%s.dll.a' % lib, 'lib%s_d.lib' % lib, '%s_d.lib' % lib, '%s.lib' %lib, ] libnames=static_libs if not is_static: libnames=dynamic_libs + static_libs for path in _libpaths: for libn in libnames: if os.path.exists(os.path.join(path,libn)): debug('lib found: %s' % 
os.path.join(path,libn), 'msvc') return libn return None def apply_msvc_obj_vars(self): debug('apply_msvc_obj_vars called for msvcobj', 'msvc') env = self.env app = env.append_unique cpppath_st = env['CPPPATH_ST'] lib_st = env['LIB_ST'] staticlib_st = env['STATICLIB_ST'] libpath_st = env['LIBPATH_ST'] staticlibpath_st = env['STATICLIBPATH_ST'] self.addflags('CPPFLAGS', self.cppflags) for i in env['RPATH']: app('LINKFLAGS', i) for i in env['LIBPATH']: app('LINKFLAGS', libpath_st % i) if not self.libpaths.count(i): self.libpaths.append(i) for i in env['LIBPATH']: app('LINKFLAGS', staticlibpath_st % i) if not self.libpaths.count(i): self.libpaths.append(i) # i doubt that anyone will make a fully static binary anyway if not env['FULLSTATIC']: if env['STATICLIB'] or env['LIB']: app('LINKFLAGS', env['SHLIB_MARKER']) if env['STATICLIB']: app('LINKFLAGS', env['STATICLIB_MARKER']) for i in env['STATICLIB']: debug('libname: %s' % i,'msvc') libname = libname_msvc(self, i, True) debug('libnamefixed: %s' % libname,'msvc') if libname != None: app('LINKFLAGS', libname) if self.env['LIB']: for i in env['LIB']: debug('libname: %s' % i,'msvc') libname = libname_msvc(self, i) debug('libnamefixed: %s' % libname,'msvc') if libname != None: app('LINKFLAGS', libname) def apply_link_msvc(self): # if we are only building .o files, tell which ones we built # FIXME remove the "type" thing # FIXME simplify this piece of code (about the same is in ccroot.py) if self.m_type == 'objects': self.out_nodes = [] app = self.out_nodes.append for t in self.compiled_tasks: app(t.m_outputs[0]) return if self.m_type=='staticlib': linktask = self.create_task('msvc_ar_link_static', self.env) else: linktask = self.create_task('msvc_%s_link' % self.m_type_initials, self.env) outputs = [t.m_outputs[0] for t in self.compiled_tasks] linktask.set_inputs(outputs) linktask.set_outputs(self.path.find_build(get_target_name(self))) link_task.m_type = self.m_type link_task.m_subsystem = getattr(self, 'subsystem', '') 
self.link_task = linktask def init_msvc(self): "all methods (msvc and non-msvc) are to be executed, but we remove the ones we do not want" if self.env['MSVC']: self.meths.remove('apply_link') else: for x in ['apply_link_msvc', 'apply_msvc_obj_vars'] self.meths.remove(x) self.libpaths = getattr(self, 'libpaths', '') static_link_str = '${STLIBLINK} ${LINK_SRC_F}${SRC} ${LINK_TGT_F}${TGT}' Action.simple_action('msvc_ar_link_static', static_link_str, color='YELLOW', prio=101) Action.Action('msvc_cc_link', vars=['LINK', 'LINK_SRC_F', 'LINK_TGT_F', 'LINKFLAGS', '_LIBDIRFLAGS', '_LIBFLAGS', 'MT', 'MTFLAGS'] , color='YELLOW', func=msvc_linker, prio=101) Action.Action('msvc_cpp_link', vars=['LINK', 'LINK_SRC_F', 'LINK_TGT_F', 'LINKFLAGS', '_LIBDIRFLAGS', '_LIBFLAGS', 'MT', 'MTFLAGS'] , color='YELLOW', func=msvc_linker, prio=101) rc_str='${RC} ${RCFLAGS} /fo ${TGT} ${SRC}' Action.simple_action('rc', rc_str, color='GREEN', prio=50) import winres def detect(conf): # due to path format limitations, limit operation only to native Win32. Yeah it sucks. if sys.platform != 'win32': conf.fatal('MSVC module only works under native Win32 Python! 
cygwin is not supported yet') comp = conf.find_program('CL', var='CXX') if not comp: conf.fatal('CL was not found (compiler)') link = conf.find_program('LINK') if not link: conf.fatal('LINK was not found (linker)') stliblink = conf.find_program('LIB') if not stliblink: return manifesttool = conf.find_program('MT') v = conf.env # c/c++ compiler - check for whitespace, and if so, add quotes v['CC'] = quote_whitespace(comp) v['CXX'] = v['CC'] v['MSVC'] = 1 v['CPPFLAGS'] = ['/W3', '/nologo', '/EHsc', '/errorReport:prompt'] v['CCDEFINES'] = ['WIN32'] # command-line defines v['CXXDEFINES'] = ['WIN32'] # command-line defines v['_CCINCFLAGS'] = [] v['_CCDEFFLAGS'] = [] v['_CXXINCFLAGS'] = [] v['_CXXDEFFLAGS'] = [] v['CC_SRC_F'] = '' v['CC_TGT_F'] = '/c /Fo' v['CXX_SRC_F'] = '' v['CXX_TGT_F'] = '/c /Fo' v['CPPPATH_ST'] = '/I%s' # template for adding include paths # Subsystem specific flags v['CPPFLAGS_CONSOLE'] = ['/SUBSYSTEM:CONSOLE'] v['CPPFLAGS_NATIVE'] = ['/SUBSYSTEM:NATIVE'] v['CPPFLAGS_POSIX'] = ['/SUBSYSTEM:POSIX'] v['CPPFLAGS_WINDOWS'] = ['/SUBSYSTEM:WINDOWS'] v['CPPFLAGS_WINDOWSCE'] = ['/SUBSYSTEM:WINDOWSCE'] # CRT specific flags v['CPPFLAGS_CRT_MULTITHREADED'] = ['/MT'] v['CPPFLAGS_CRT_MULTITHREADED_DLL'] = ['/MD'] v['CPPDEFINES_CRT_MULTITHREADED'] = ['_MT'] v['CPPDEFINES_CRT_MULTITHREADED_DLL'] = ['_MT', '_DLL'] v['CPPFLAGS_CRT_MULTITHREADED_DBG'] = ['/MTd'] v['CPPFLAGS_CRT_MULTITHREADED_DLL_DBG'] = ['/MDd'] v['CPPDEFINES_CRT_MULTITHREADED_DBG'] = ['_DEBUG', '_MT'] v['CPPDEFINES_CRT_MULTITHREADED_DLL_DBG'] = ['_DEBUG', '_MT', '_DLL'] # compiler debug levels v['CCFLAGS'] = ['/TC'] v['CCFLAGS_OPTIMIZED'] = ['/O2', '/DNDEBUG'] v['CCFLAGS_RELEASE'] = ['/O2', '/DNDEBUG'] v['CCFLAGS_DEBUG'] = ['/Od', '/RTC1', '/D_DEBUG', '/ZI'] v['CCFLAGS_ULTRADEBUG'] = ['/Od', '/RTC1', '/D_DEBUG', '/ZI'] v['CXXFLAGS'] = ['/TP'] v['CXXFLAGS_OPTIMIZED'] = ['/O2', '/DNDEBUG'] v['CXXFLAGS_RELEASE'] = ['/O2', '/DNDEBUG'] v['CXXFLAGS_DEBUG'] = ['/Od', '/RTC1', '/D_DEBUG', '/ZI'] 
v['CXXFLAGS_ULTRADEBUG'] = ['/Od', '/RTC1', '/D_DEBUG', '/ZI'] # linker v['STLIBLINK'] = '\"%s\"' % stliblink v['LINK'] = '\"%s\"' % link v['LIB'] = [] v['LINK_TGT_F'] = '/OUT:' v['LINK_SRC_F'] = ' ' v['LIB_ST'] = '%s.lib' # template for adding libs v['LIBPATH_ST'] = '/LIBPATH:%s' # template for adding libpaths v['STATICLIB_ST'] = '%s.lib' v['STATICLIBPATH_ST'] = '/LIBPATH:%s' v['CCDEFINES_ST'] = '/D%s' v['CXXDEFINES_ST'] = '/D%s' v['_LIBDIRFLAGS'] = '' v['_LIBFLAGS'] = '' v['SHLIB_MARKER'] = '' v['STATICLIB_MARKER'] = '' conf.check_tool('winres') if not conf.env['WINRC']: warning('Resource compiler not found. Compiling resource file is disabled','msvc') # manifest tool. Not required for VS 2003 and below. Must have for VS 2005 and later if manifesttool: v['MT'] = quote_whitespace (manifesttool) v['MTFLAGS']=['/NOLOGO'] v['LINKFLAGS'] = ['/NOLOGO', '/MACHINE:X86', '/ERRORREPORT:PROMPT'] try: debug_level = Params.g_options.debug_level.upper() except AttributeError: debug_level = ccroot.DEBUG_LEVELS.CUSTOM v['CCFLAGS'] += v['CCFLAGS_'+debug_level] v['CXXFLAGS'] += v['CXXFLAGS_'+debug_level] v['LINKFLAGS'] += v['LINKFLAGS_'+debug_level] conf.add_os_flags('CFLAGS', 'CCFLAGS') conf.add_os_flags('CPPFLAGS') conf.add_os_flags('CXXFLAGS') conf.add_os_flags('LINKFLAGS') # shared library v['shlib_CCFLAGS'] = [''] v['shlib_CXXFLAGS'] = [''] v['shlib_LINKFLAGS']= ['/DLL'] v['shlib_PATTERN'] = '%s.dll' # plugins. We handle them exactly as shlibs # everywhere except on osx, where we do bundles v['plugin_CCFLAGS'] = v['shlib_CCFLAGS'] v['plugin_LINKFLAGS'] = v['shlib_LINKFLAGS'] v['plugin_PATTERN'] = v['shlib_PATTERN'] # static library v['staticlib_LINKFLAGS'] = [''] v['staticlib_PATTERN'] = '%s.lib' v['program_PATTERN'] = '%s.exe' def set_options(opt): try: opt.add_option('-d', '--debug-level', action = 'store', default = ccroot.DEBUG_LEVELS.DEBUG, help = "Specify the debug level, does nothing if CFLAGS is set in the environment. 
[Allowed Values: '%s']" % "', '".join(ccroot.DEBUG_LEVELS.ALL), choices = ccroot.DEBUG_LEVELS.ALL, dest = 'debug_level') except optparse.OptionConflictError: pass # maybe already defined by another C-compiler taskgen(apply_msvc_obj_vars) feature('cc')(apply_msvc_obj_vars) feature('cxx')(apply_msvc_obj_vars) after('apply_obj_vars_cc')(apply_msvc_obj_vars) after('apply_obj_vars_cxx')(apply_msvc_obj_vars) taskgen(apply_link_msvc) feature('cc')(apply_link_msvc) feature('cxx')(apply_link_msvc) after('apply_core')(apply_link_msvc) before('apply_obj_vars_cc')(apply_link_msvc) before('apply_obj_vars_cxx')(apply_link_msvc) taskgen(init_msvc) feature('cc')(init_msvc) feature('cxx')(init_msvc) before('apply_core')(init_msvc) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/boost.py0000664000175000017500000001726110771525013022467 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # Gernot Vormayr, 2008 """ Quick n dirty boost detections """ import os, glob, types import Params, Configure from Params import fatal def detect_boost(conf): env = conf.env opt = Params.g_options want_asio = 0 if env['WANT_BOOST']: if type(env['WANT_BOOST']) is types.StringType: want_libs = env['WANT_BOOST'].split() else: want_libs = env['WANT_BOOST'] if want_libs.count('ASIO'): want_libs.remove('ASIO') want_asio=1 if want_libs.count('ASIO_MT'): want_libs.remove('ASIO_MT') want_asio=2 else: want_libs = 0 boostlibs = getattr(opt, 'boostlibs', '') boostincludes = getattr(opt, 'boostincludes', '') asioincludes = getattr(opt, 'asioincludes', '') boostfolder = getattr(opt, 'boostfolder', '') if boostfolder: boostincludes=boostfolder+'/include' boostlibs=boostfolder+'/lib' #let's try to find boost which is not easy, cause boost seems like it wants to hide :( if not boostincludes: boostincludes= ['/sw/include', '/usr/local/include', '/opt/include', '/opt/local/include', '/usr/include'] else: boostincludes=[boostincludes] guess=[] for dir in 
boostincludes: try: for subdir in os.listdir(dir): # we have to check for boost or boost-version cause there are systems # which put boost directly into a boost subdir (eg. gentoo) if subdir=='boost': guess.append(dir) elif subdir.startswith('boost-'): guess.append(dir+'/'+subdir) except OSError: pass if not guess: fatal('boost headers not found') return 0 versions={} for dir in guess: test_obj = Configure.check_data() test_obj.code = '#include \n#include \nint main() { std::cout << BOOST_VERSION << std::endl; return 0; }\n' test_obj.env = env test_obj.env['CPPPATH']=[dir] test_obj.execute = 1 test_obj.force_compiler='cpp' ret=conf.run_check(test_obj) if ret: versions[int(ret['result'])]=dir version=versions.keys() errtext='' if env['WANT_BOOST_MIN']: errtext+='>= '+env['WANT_BOOST_MIN']+' ' min_version=env['WANT_BOOST_MIN'].split('.') min_version=int(min_version[0])*100000+int(min_version[1])*100+int(min_version[2]) version=filter(lambda x:x>=min_version,version) if env['WANT_BOOST_MAX']: errtext+='<= '+env['WANT_BOOST_MAX']+' ' max_version=env['WANT_BOOST_MAX'].split('.') max_version=int(max_version[0])*100000+int(max_version[1])*100+int(max_version[2]) version=filter(lambda x:x<=max_version,version) version.sort() if len(version) is 0: fatal('No boost '+errtext+'found!') version=version.pop() boost_includes=versions[version] version="%d.%d.%d" % (version/100000,version/100%1000,version%100) conf.check_message('header','boost/version.hpp',1,'Version '+boost_includes+' ('+version+')') env['CPPPATH_BOOST']=boost_includes # search vor asio if want_asio: errtext='' asio_version=min_version=max_version=0 if env['WANT_ASIO_MIN']: errtext+='>= '+env['WANT_ASIO_MIN']+' ' min_version=env['WANT_ASIO_MIN'].split('.') min_version=int(min_version[0])*100000+int(min_version[1])*100+int(min_version[2]) if env['WANT_ASIO_MAX']: errtext+='<= '+env['WANT_ASIO_MAX']+' ' max_version=env['WANT_ASIO_MAX'].split('.') 
max_version=int(max_version[0])*100000+int(max_version[1])*100+int(max_version[2]) #first look in the boost dir - but not when asioincludes is set if not asioincludes: test_obj = Configure.check_data() test_obj.code = '#include \n#include \nint main() { std::cout << BOOST_ASIO_VERSION << std::endl; return 0; }\n' test_obj.env = env test_obj.env['CPPPATH']=[boost_includes] test_obj.execute = 1 test_obj.force_compiler='cpp' ret=conf.run_check(test_obj) if ret: asio_version=int(ret['result']) if min_version and asio_versionmax_version: asio_version=0 if asio_version: conf.define('BOOST_ASIO',1) version="%d.%d.%d" % (asio_version/100000,asio_version/100%1000,asio_version%100) conf.check_message('header','boost/asio/version.hpp',1,'Version '+version) if want_asio==1: if want_libs: try: want_libs.remove('BOOST_SYSTEM') except ValueError: pass want_libs.append('BOOST_SYSTEM') else: want_libs=['BOOST_SYSTEM'] else: if want_libs: try: want_libs.remove('BOOST_SYSTEM_MT') except ValueError: pass want_libs.append('BOOST_SYSTEM_MT') else: want_libs=['BOOST_SYSTEM_MT'] #ok not in boost dir - ahh did i say ok? na imho that's not ok! 
if not asio_version: if not asioincludes: asioincludes= ['/sw/include', '/usr/local/include', '/opt/include', '/opt/local/include', '/usr/include'] else: asioincludes=[asioincludes] versions={} for dir in asioincludes: test_obj = Configure.check_data() test_obj.code = '#include \n#include \nint main() { std::cout << ASIO_VERSION << std::endl; return 0; }\n' test_obj.env = env test_obj.env['CPPPATH']=[dir] test_obj.execute = 1 test_obj.force_compiler='cpp' ret=conf.run_check(test_obj) if ret: versions[int(ret['result'])]=dir version=versions.keys() if min_version: version=filter(lambda x:x>=min_version,version) if max_version: version=filter(lambda x:x<=max_version,version) version.sort() if len(version) is 0: fatal('No asio '+errtext+'found!') version=version.pop() asio_includes=versions[version] version="%d.%d.%d" % (version/100000,version/100%1000,version%100) conf.check_message('header','asio/version.hpp',1,'Version '+asio_includes+' ('+version+')') env['CPPPATH_ASIO']=asio_includes env['CPPPATH_ASIO_MT']=asio_includes conf.undefine('BOOST_ASIO') #well now we've found our includes - let's search for the precompiled libs if want_libs: def check_boost_libs(libs,lib_path): files=glob.glob(lib_path+'/libboost_*'+env['shlib_SUFFIX']) files=map(lambda x:x[len(lib_path)+4:-len(env['shlib_SUFFIX'])] ,filter(lambda x: x.find('-d')==-1 ,files)) for lib in libs: libname=lib.lower() if libname.endswith('_mt'): libname=libname[0:-3]+'-mt' for file in files: if file.startswith(libname): conf.check_message('library',libname,1,file) env['LIBPATH_'+lib]=lib_path env['LIB_'+lib]=file if lib is 'BOOST_SYSTEM': env['LIB_ASIO']=file env['LIBPATH_ASIO']=file elif lib is 'BOOST_SYSTEM_MT': env['LIB_ASIO_MT']=file env['LIBPATH_ASIO_MT']=file break else: fatal('lib '+libname+' not found!') if not boostlibs: boostlibs=['/usr/lib64', '/usr/lib32', '/usr/lib', '/sw/lib', '/usr/local/lib', '/opt/lib', '/opt/local/lib'] else: boostlibs=[boostlibs] 
lib_path=Configure.find_file_ext('libboost_*'+version+'*',boostlibs) if lib_path=='': lib_path=Configure.find_file_ext('libboost_*',boostlibs) if lib_path=='': conf.check_message('library','boost',0,'') else: check_boost_libs(want_libs,lib_path) else: check_boost_libs(want_libs,lib_path) return 1 def detect(conf): return detect_boost(conf) def set_options(opt): opt.add_option('--boost-includes', type='string', default='', dest='boostincludes', help='path to the boost directory where the includes are e.g. /usr/local/include/boost-1_34_1') opt.add_option('--boost-libs', type='string', default='', dest='boostlibs', help='path to the directory where the boost libs are e.g. /usr/local/lib') opt.add_option('--boost', type='string', default='', dest='boostfolder', help='path to the directory where the boost lives are e.g. /usr/local') opt.add_option('--asio-includes', type='string', default='', dest='asioincludes', help='path to asio e.g. /usr/local/include/asio') lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/bison.py0000664000175000017500000000126010771525013022443 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # John O'Meara, 2006 "Bison processing" import Object def decide_ext(self, node): c_ext = '.tab.c' if node.m_name.endswith('.yc'): c_ext = '.tab.cc' if '-d' in self.env['BISONFLAGS']: return [c_ext, c_ext.replace('c', 'h')] else: return c_ext Object.declare_chain( name = 'bison', action = 'cd ${SRC[0].bld_dir(env)} && ${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].m_name}', ext_in = ['.y', '.yc'], ext_out = decide_ext ) def detect(conf): bison = conf.find_program('bison', var='BISON') if not bison: conf.fatal("bison was not found") v = conf.env v['BISONFLAGS'] = '-d' lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/intltool.py0000664000175000017500000000670610772264157023221 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! 
/usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2006 (ita) "intltool support" import os, re import Object, Action, Params, Common, Scan, Utils, Runner import cc from Params import fatal, error # intltool class intltool_in_taskgen(Object.task_gen): def __init__(self): Object.task_gen.__init__(self) self.source = '' self.inst_var = '' self.inst_dir = '' self.flags = '' self.podir = 'po' self.intlcache = '.intlcache' self.m_tasks = [] def apply(self): self.env = self.env.copy() tree = Params.g_build current = tree.m_curdirnode for i in self.to_list(self.source): node = self.path.find_source(i) podirnode = self.path.find_source(self.podir) self.env['INTLCACHE'] = os.path.join(self.path.bldpath(self.env), self.podir, self.intlcache) self.env['INTLPODIR'] = podirnode.srcpath(self.env) self.env['INTLFLAGS'] = self.flags task = self.create_task('intltool', self.env) task.set_inputs(node) task.set_outputs(node.change_ext('')) task.install = {'var': self.inst_var, 'dir': self.inst_dir, 'chmod': 0644} class intltool_po_taskgen(Object.task_gen): def __init__(self, appname='set_your_app_name'): Object.task_gen.__init__(self) self.chmod = 0644 self.inst_var = 'LOCALEDIR' self.appname = appname self.m_tasks=[] def apply(self): def install_translation(task): out = task.m_outputs[0] filename = out.m_name (langname, ext) = os.path.splitext(filename) inst_file = langname + os.sep + 'LC_MESSAGES' + os.sep + self.appname + '.mo' Common.install_as(self.inst_var, inst_file, out.abspath(self.env), chmod=self.chmod) linguas = self.path.find_source('LINGUAS') if linguas: # scan LINGUAS file for locales to process f = open(linguas.abspath()) re_linguas = re.compile('[-a-zA-Z_@.]+') for line in f.readlines(): # Make sure that we only process lines which contain locales if re_linguas.match(line): node = self.path.find_build(re_linguas.match(line).group() + '.po') task = self.create_task('po', self.env) task.set_inputs(node) task.set_outputs(node.change_ext('.mo')) if Params.g_install: 
task.install = install_translation else: Params.pprint('RED', "Error no LINGUAS file found in po directory") Action.simple_action('po', '${POCOM} -o ${TGT} ${SRC}', color='BLUE', prio=10) Action.simple_action('intltool', '${INTLTOOL} ${INTLFLAGS} -q -u -c ${INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}', color='BLUE', prio=200) def detect(conf): conf.check_tool('checks') pocom = conf.find_program('msgfmt') #if not pocom: # fatal('The program msgfmt (gettext) is mandatory!') conf.env['POCOM'] = pocom intltool = conf.find_program('intltool-merge') #if not intltool: # fatal('The program intltool-merge (intltool, gettext-devel) is mandatory!') conf.env['INTLTOOL'] = intltool def getstr(varname): return getattr(Params.g_options, varname, '') prefix = conf.env['PREFIX'] datadir = getstr('datadir') if not datadir: datadir = os.path.join(prefix,'share') conf.define('LOCALEDIR', os.path.join(datadir, 'locale')) conf.define('DATADIR', datadir) #Define to 1 if you have the header file. conf.check_header('locale.h', 'HAVE_LOCALE_H') def set_options(opt): try: opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]') except Exception: pass for i in "datadir".split(): opt.add_option('--'+i, type='string', default='', dest=i) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/cs.py0000664000175000017500000000355010772264157021754 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! 
/usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2006 (ita) "C# support" import Params, Action, Object, Utils from Params import error g_types_lst = ['program', 'library'] class cs_taskgen(Object.task_gen): def __init__(self, type): Object.task_gen.__init__(self) self.m_type = type self.source = '' self.target = '' self.flags = '' self.assemblies = '' self.resources = '' self.uselib = '' self._flag_vars = ['FLAGS', 'ASSEMBLIES'] if not self.env: self.env = Params.g_build.env().copy() if not type in g_types_lst: error('type for csobj is undefined '+type) type='program' def apply(self): self.apply_uselib() # process the flags for the assemblies assemblies_flags = [] for i in self.to_list(self.assemblies) + self.env['ASSEMBLIES']: assemblies_flags += '/r:'+i self.env['_ASSEMBLIES'] += assemblies_flags # process the flags for the resources for i in self.to_list(self.resources): self.env['_RESOURCES'].append('/resource:'+i) # additional flags self.env['_FLAGS'] += self.to_list(self.flags) + self.env['FLAGS'] curnode = self.path # process the sources nodes = [] for i in self.to_list(self.source): nodes.append(curnode.find_source(i)) # create the task task = self.create_task('mcs', self.env) task.m_inputs = nodes task.set_outputs(self.path.find_build(self.target)) def apply_uselib(self): if not self.uselib: return for var in self.to_list(self.uselib): for v in self._flag_vars: val = self.env[v+'_'+var] if val: self.env.append_value(v, val) Action.simple_action('mcs', '${MCS} ${SRC} /out:${TGT} ${_FLAGS} ${_ASSEMBLIES} ${_RESOURCES}', color='YELLOW', prio=101) def detect(conf): mcs = conf.find_program('mcs', var='MCS') if not mcs: mcs = conf.find_program('gmcs', var='MCS') lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/ccroot.py0000664000175000017500000003174110772264157022643 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! 
/usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2005-2008 (ita) "base for all c/c++ programs and libraries" import os, sys, re import Action, Object, Params, Scan, Common, Utils, preproc from Params import error, debug, fatal, warning from Object import taskgen, after, before, feature class DEBUG_LEVELS: ULTRADEBUG = "ultradebug" DEBUG = "debug" RELEASE = "release" OPTIMIZED = "optimized" CUSTOM = "custom" ALL = [ULTRADEBUG, DEBUG, RELEASE, OPTIMIZED, CUSTOM] class c_scanner(Scan.scanner): "scanner for c/c++ files" def __init__(self): Scan.scanner.__init__(self) self.vars = ('CCDEFINES', 'CXXDEFINES') def scan(self, task, node): "look for .h the .cpp need" debug("_scan_preprocessor(self, node, env, path_lst)", 'ccroot') gruik = preproc.c_parser(nodepaths = task.path_lst, defines = task.defines) gruik.start(node, task.env()) if Params.g_verbose: debug("nodes found for %s: %s %s" % (str(node), str(gruik.m_nodes), str(gruik.m_names)), 'deps') debug("deps found for %s: %s" % (str(node), str(gruik.deps)), 'deps') return (gruik.m_nodes, gruik.m_names) g_c_scanner = c_scanner() "scanner for c programs" class ccroot_abstract(Object.task_gen): "Parent class for programs and libraries in languages c, c++ and moc (Qt)" def __init__(self, type='program', subtype=None): Object.task_gen.__init__(self) # TODO obsolete self.m_type = type if self.m_type == 'objects': self.features.append(type) else: self.features.append('normal') # includes, seen from the current directory self.includes='' # list of directories to enable when scanning # #include directives in source files for automatic # dependency tracking. If left empty, scanning the # whole project tree is enabled. If non-empty, only # the indicated directories (which must be relative # paths), plus the directories in obj.includes, are # scanned for #includes. 
self.dependencies = '' self.defines='' self.rpaths='' self.uselib='' # new scheme: provide the names of the local libraries to link with # the objects found will be post()-ed self.uselib_local='' # add .o files produced by another task_gen class self.add_objects = '' # version number for shared libraries #self.vnum='1.2.3' # #self.soname='.so.3' # else soname is computed from vnum #self.program_chmod = 0755 # by default: 0755 # do not forget to set the following variables in a subclass self.p_flag_vars = [] self.p_type_vars = [] # TODO ??? self.m_type_initials = '' # these are kind of private, do not touch self.incpaths_lst=[] self.inc_paths = [] self.scanner_defines = {} self.bld_incpaths_lst=[] self.subtype = subtype self.compiled_tasks = [] self.link_task = None self.inst_var = '' # mark as installable TODO # characteristics of what we want to build: cc, cpp, program, staticlib, shlib, etc #self.features = ['program'] def get_target_name(self): name = self.target pattern = self.env[self.m_type+'_PATTERN'] if not pattern: pattern = '%s' # name can be src/mylib k = name.rfind('/') return name[0:k+1] + pattern % name[k+1:] def apply_verif(self): if not hasattr(self, 'nochecks'): if not (self.source or self.add_objects): fatal('no source files specified for %s' % self) if not self.target and self.m_type != 'objects': fatal('no target for %s' % self) def install_shlib(task): nums = task.vnum.split('.') dest_var = task.dest_var dest_subdir = task.dest_subdir libname = task.m_outputs[0].m_name name3 = libname+'.'+task.vnum name2 = libname+'.'+nums[0] name1 = libname filename = task.m_outputs[0].relpath_gen(Params.g_build.m_curdirnode) Common.install_as(dest_var, dest_subdir+'/'+name3, filename, env=task.env()) Common.symlink_as(dest_var, name3, dest_subdir+'/'+name2) Common.symlink_as(dest_var, name3, dest_subdir+'/'+name1) def install_target(self): # FIXME too complicated if not Params.g_install: return dest_var = self.inst_var dest_subdir = self.inst_dir if dest_var 
== 0: return if not dest_var: dest_var = 'PREFIX' if self.m_type == 'program': dest_subdir = 'bin' else: dest_subdir = 'lib' if (self.m_type == 'shlib' or self.m_type == 'plugin') and getattr(self, 'vnum', '') and sys.platform != 'win32': # shared libraries on linux tsk = self.link_task tsk.vnum = self.vnum tsk.dest_var = dest_var tsk.dest_subdir = dest_subdir tsk.install = install_shlib else: # program or staticlib try: mode = self.program_chmod except AttributeError: if self.m_type == 'program': mode = 0755 else: mode = 0644 install = {'var':dest_var,'dir':dest_subdir,'chmod':mode} self.link_task.install = install def apply_dependencies(self): if self.dependencies: dep_lst = (self.to_list(self.dependencies) + self.to_list(self.includes)) self.inc_paths = [] for directory in dep_lst: if os.path.isabs(directory): Params.fatal("Absolute paths not allowed in obj.dependencies") return node = self.path.find_dir_lst(Utils.split_path(directory)) if not node: Params.fatal("node not found in ccroot:apply_dependencies " + str(directory), 'ccroot') return if node not in self.inc_paths: self.inc_paths.append(node) else: # by default, we include the whole project tree lst = [self.path] for obj in Object.g_allobjs: if obj.path not in lst: lst.append(obj.path) self.inc_paths = lst + self.incpaths_lst def apply_incpaths(self): lst = [] for i in self.to_list(self.uselib): if self.env['CPPPATH_'+i]: lst += self.to_list(self.env['CPPPATH_'+i]) inc_lst = self.to_list(self.includes) + lst lst = self.incpaths_lst # add the build directory self.incpaths_lst.append(Params.g_build.m_bldnode) self.incpaths_lst.append(Params.g_build.m_srcnode) # now process the include paths tree = Params.g_build for dir in inc_lst: if os.path.isabs(dir): self.env.append_value('CPPPATH', dir) continue node = self.path.find_dir_lst(Utils.split_path(dir)) if not node: debug("node not found in ccroot:apply_incpaths "+str(dir), 'ccroot') continue if not node in lst: lst.append(node) Params.g_build.rescan(node) 
self.bld_incpaths_lst.append(node) # now the nodes are added to self.incpaths_lst def apply_type_vars(self): # the subtype, used for all sorts of evil things if not self.subtype: if self.m_type in 'program staticlib plugin'.split(): self.subtype = self.m_type else: self.subtype = 'shlib' # if the subtype defines uselib to add, add them st = self.env[self.subtype+'_USELIB'] if st: self.uselib = self.uselib + ' ' + st # each compiler defines variables like 'shlib_CXXFLAGS', 'shlib_LINKFLAGS', etc # so when we make a cppobj of the type shlib, CXXFLAGS are modified accordingly for var in self.p_type_vars: compvar = '_'.join([self.m_type, var]) #print compvar value = self.env[compvar] if value: self.env.append_value(var, value) def apply_link(self): if self.m_type=='staticlib': linktask = self.create_task('ar_link_static', self.env) else: linktask = self.create_task(self.m_type_initials+'_link', self.env) outputs = [t.m_outputs[0] for t in self.compiled_tasks] linktask.set_inputs(outputs) linktask.set_outputs(self.path.find_build(get_target_name(self))) self.link_task = linktask def apply_lib_vars(self): env = self.env # 1. the case of the libs defined in the project (visit ancestors first) # the ancestors external libraries (uselib) will be prepended uselib = self.to_list(self.uselib) seen = [] names = [] + self.to_list(self.uselib_local) # consume a copy of the list of names while names: x = names.pop(0) # visit dependencies only once if x in seen: continue # object does not exist ? 
y = Object.name_to_obj(x) if not y: fatal('object not found in uselib_local: obj %s uselib %s' % (self.name, x)) continue # object has ancestors to process: add them to the end of the list if y.uselib_local: lst = y.to_list(y.uselib_local) for u in lst: if u in seen: continue names.append(u) # safe to process the current object if not y.m_posted: y.post() seen.append(x) if y.m_type == 'shlib': env.append_value('LIB', y.target) elif y.m_type == 'plugin': if sys.platform == 'darwin': env.append_value('PLUGIN', y.target) else: env.append_value('LIB', y.target) elif y.m_type == 'staticlib': env.append_value('STATICLIB', y.target) elif y.m_type == 'objects': pass else: error('%s has unknown object type %s, in apply_lib_vars, uselib_local.' % (y.name, y.m_type)) # add the link path too tmp_path = y.path.bldpath(self.env) if not tmp_path in env['LIBPATH']: env.prepend_value('LIBPATH', tmp_path) # set the dependency over the link task if y.link_task is not None: self.link_task.set_run_after(y.link_task) dep_nodes = getattr(self.link_task, 'dep_nodes', []) self.link_task.dep_nodes = dep_nodes + y.link_task.m_outputs # add ancestors uselib too # TODO potential problems with static libraries ? morelibs = y.to_list(y.uselib) for v in morelibs: if v in uselib: continue uselib = [v]+uselib # 2. the case of the libs defined outside for x in uselib: for v in self.p_flag_vars: val = self.env[v+'_'+x] if val: self.env.append_value(v, val) def apply_objdeps(self): "add the .o files produced by some other object files in the same manner as uselib_local" seen = [] names = self.to_list(self.add_objects) while names: x = names[0] # visit dependencies only once if x in seen: names = names[1:] continue # object does not exist ? y = Object.name_to_obj(x) if not y: error('object not found in add_objects: obj %s add_objects %s' % (self.name, x)) names = names[1:] continue # object has ancestors to process first ? 
update the list of names if y.add_objects: added = 0 lst = y.to_list(y.add_objects) lst.reverse() for u in lst: if u in seen: continue added = 1 names = [u]+names if added: continue # list of names modified, loop # safe to process the current object if not y.m_posted: y.post() seen.append(x) self.link_task.m_inputs += y.out_nodes def apply_obj_vars(self): lib_st = self.env['LIB_ST'] staticlib_st = self.env['STATICLIB_ST'] libpath_st = self.env['LIBPATH_ST'] staticlibpath_st = self.env['STATICLIBPATH_ST'] # FIXME self.addflags('CPPFLAGS', self.cppflags) app = self.env.append_unique for i in self.env['RPATH']: app('LINKFLAGS', i) for i in self.env['LIBPATH']: app('LINKFLAGS', libpath_st % i) for i in self.env['LIBPATH']: app('LINKFLAGS', staticlibpath_st % i) if self.env['STATICLIB']: self.env.append_value('LINKFLAGS', self.env['STATICLIB_MARKER']) k = [(staticlib_st % i) for i in self.env['STATICLIB']] app('LINKFLAGS', k) # fully static binaries ? if not self.env['FULLSTATIC']: if self.env['STATICLIB'] or self.env['LIB']: self.env.append_value('LINKFLAGS', self.env['SHLIB_MARKER']) app('LINKFLAGS', [lib_st % i for i in self.env['LIB']]) def apply_vnum(self): "use self.vnum and self.soname to modify the command line (un*x)" try: vnum = self.vnum except AttributeError: return # this is very unix-specific if sys.platform != 'darwin' and sys.platform != 'win32': nums = self.vnum.split('.') try: name3 = self.soname except AttributeError: name3 = self.link_task.m_outputs[0].m_name+'.'+self.vnum.split('.')[0] self.env.append_value('LINKFLAGS', '-Wl,-h,'+name3) def process_obj_files(self): if not hasattr(self, 'obj_files'): return for x in self.obj_files: node = self.path.find_source(x) self.link_task.m_inputs.append(node) def add_obj_file(self, file): """Small example on how to link object files as if they were source obj = bld.create_obj('cc') obj.add_obj_file('foo.o')""" if not hasattr(self, 'obj_files'): self.obj_files = [] if not 'process_obj_files' in self.meths: 
self.meths.add('process_obj_files') self.obj_files.append(file) def make_objects_available(self): """when we do not link; make the .o files available if we are only building .o files, tell which ones we built""" self.out_nodes = [] app = self.out_nodes.append for t in self.compiled_tasks: app(t.m_outputs[0]) taskgen(apply_verif) taskgen(install_target) feature('normal')(install_target) after('apply_objdeps')(install_target) taskgen(apply_dependencies) after('apply_incpaths')(apply_dependencies) before('apply_core')(apply_dependencies) taskgen(apply_incpaths) after('apply_type_vars')(apply_incpaths) taskgen(apply_type_vars) taskgen(apply_link) feature('normal')(apply_link) after('apply_core')(apply_link) taskgen(apply_lib_vars) after('apply_vnum')(apply_lib_vars) taskgen(apply_objdeps) feature('normal')(apply_objdeps) after('apply_obj_vars')(apply_objdeps) after('apply_vnum')(apply_objdeps) taskgen(apply_obj_vars) feature('normal')(apply_obj_vars) after('apply_lib_vars')(apply_obj_vars) taskgen(apply_vnum) feature('normal')(apply_vnum) after('apply_link')(apply_vnum) taskgen(process_obj_files) after('apply_link')(process_obj_files) taskgen(add_obj_file) taskgen(make_objects_available) feature('objects')(make_objects_available) after('apply_core')(make_objects_available) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/lua.py0000664000175000017500000000113610772264157022126 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! 
/usr/bin/env python # encoding: utf-8 # Sebastian Schlingmann, 2008 # Thomas Nagy, 2008 (ita) import Object Object.declare_chain( name = 'luac', action = '${LUAC} -s -o ${TGT} ${SRC}', ext_in = '.lua', ext_out = '.luac', reentrant = 0, install = 'LUADIR', # env variable ) class lua_taskgen(Object.task_gen): def __init__(self): Object.task_gen.__init__(self) self.chmod = 0755 self.inst_var = '' self.inst_dir = '' def detect(conf): luac = conf.find_program('luac', var='LUAC') if not luac: conf.fatal('cannot find the compiler "luac"') lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/cc.py0000664000175000017500000000717310772264157021741 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 import sys if sys.hexversion < 0x020400f0: from sets import Set as set #! /usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2006 (ita) "Base for c programs/libraries" import sys import Object, Params, Action, Utils from Params import debug import ccroot # <- do not remove from Object import taskgen, before, extension g_cc_flag_vars = [ 'FRAMEWORK', 'FRAMEWORKPATH', 'STATICLIB', 'LIB', 'LIBPATH', 'LINKFLAGS', 'RPATH', 'INCLUDE', 'CCFLAGS', 'CPPPATH', 'CPPFLAGS', 'CCDEFINES'] EXT_CC = ['.c', '.cc'] CC_METHS = ['init_cc', 'apply_type_vars', 'apply_incpaths', 'apply_dependencies', 'apply_defines_cc', 'apply_core', 'apply_lib_vars', 'apply_obj_vars_cc'] Object.add_feature('cc', CC_METHS) g_cc_type_vars = ['CCFLAGS', 'LINKFLAGS'] # TODO get rid of this class cc_taskgen(ccroot.ccroot_abstract): def __init__(self, type='program', subtype=None): ccroot.ccroot_abstract.__init__(self, type, subtype) self.m_type_initials = 'cc' self.ccflags='' self.cppflags='' self.features.append('cc') global g_cc_type_vars self.p_type_vars = g_cc_type_vars def init_cc(self): if hasattr(self, 'p_flag_vars'): self.p_flag_vars = set(self.p_flag_vars).union(g_cc_flag_vars) else: self.p_flag_vars = g_cc_flag_vars if hasattr(self, 'p_type_vars'): self.p_type_vars = 
set(self.p_type_vars).union(g_cc_type_vars) else: self.p_type_vars = g_cc_type_vars def apply_obj_vars_cc(self): debug('apply_obj_vars_cc', 'ccroot') env = self.env app = env.append_unique cpppath_st = self.env['CPPPATH_ST'] self.addflags('CCFLAGS', self.ccflags) # local flags come first # set the user-defined includes paths for i in self.bld_incpaths_lst: app('_CCINCFLAGS', cpppath_st % i.bldpath(env)) app('_CCINCFLAGS', cpppath_st % i.srcpath(env)) # set the library include paths for i in env['CPPPATH']: app('_CCINCFLAGS', cpppath_st % i) # this is usually a good idea app('_CCINCFLAGS', cpppath_st % '.') app('_CCINCFLAGS', cpppath_st % env.variant()) tmpnode = self.path app('_CCINCFLAGS', cpppath_st % tmpnode.bldpath(env)) app('_CCINCFLAGS', cpppath_st % tmpnode.srcpath(env)) def apply_defines_cc(self): tree = Params.g_build lst = self.to_list(self.defines)+self.to_list(self.env['CCDEFINES']) milst = [] # now process the local defines for defi in lst: if not defi in milst: milst.append(defi) # CCDEFINES_ libs = self.to_list(self.uselib) for l in libs: val = self.env['CCDEFINES_'+l] if val: milst += val self.env['DEFLINES'] = ["%s %s" % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in milst]] y = self.env['CCDEFINES_ST'] self.env['_CCDEFFLAGS'] = [y%x for x in milst] def c_hook(self, node): # create the compilation task: cpp or cc task = self.create_task('cc', self.env) try: obj_ext = self.obj_ext except AttributeError: obj_ext = '_%s.o' % self.m_type[:2] task.m_scanner = ccroot.g_c_scanner task.path_lst = self.inc_paths task.defines = self.scanner_defines task.m_inputs = [node] task.m_outputs = [node.change_ext(obj_ext)] self.compiled_tasks.append(task) cc_str = '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT}' link_str = '${LINK_CC} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT} ${LINKFLAGS} ${_LIBDIRFLAGS} ${_LIBFLAGS}' Action.simple_action('cc', cc_str, 'GREEN', prio=100) 
Action.simple_action('cc_link', link_str, color='YELLOW', prio=111) Object.declare_order('apply_dependencies', 'apply_defines_cc', 'apply_core', 'apply_lib_vars', 'apply_obj_vars_cc', 'apply_obj_vars') taskgen(init_cc) before('apply_type_vars')(init_cc) taskgen(apply_obj_vars_cc) taskgen(apply_defines_cc) extension(EXT_CC)(c_hook) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/sunc++.py0000664000175000017500000000526310771525013022436 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2006 (ita) # Ralf Habacker, 2006 (rh) import os, optparse import Utils, Action, Params, Configure import ccroot, ar def find_cxx(conf): v = conf.env cc = None if v['CXX']: cc = v['CXX'] elif 'CXX' in os.environ: cc = os.environ['CXX'] #if not cc: cc = conf.find_program('g++', var='CXX') if not cc: cc = conf.find_program('c++', var='CXX') if not cc: conf.fatal('sunc++ was not found') v['CXX'] = cc def common_flags(conf): v = conf.env # CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS _LIBDIRFLAGS _LIBFLAGS v['CXX_SRC_F'] = '' v['CXX_TGT_F'] = '-c -o ' v['CPPPATH_ST'] = '-I%s' # template for adding include paths # linker if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX'] v['CXXLNK_SRC_F'] = '' v['CXXLNK_TGT_F'] = '-o ' v['LIB_ST'] = '-l%s' # template for adding libs v['LIBPATH_ST'] = '-L%s' # template for adding libpaths v['STATICLIB_ST'] = '-l%s' v['STATICLIBPATH_ST'] = '-L%s' v['CXXDEFINES_ST'] = '-D%s' v['SHLIB_MARKER'] = '-Bdynamic' v['STATICLIB_MARKER'] = '-Bstatic' # program v['program_PATTERN'] = '%s' # shared library v['shlib_CXXFLAGS'] = ['-Kpic', '-DPIC'] v['shlib_LINKFLAGS'] = ['-G'] v['shlib_PATTERN'] = 'lib%s.so' # static lib v['staticlib_LINKFLAGS'] = ['-Bstatic'] v['staticlib_PATTERN'] = 'lib%s.a' def modifier_debug(conf): v = conf.env v['CXXFLAGS'] = [''] if conf.check_flags('-O2'): v['CXXFLAGS_OPTIMIZED'] = ['-O2'] v['CXXFLAGS_RELEASE'] = ['-O2'] if conf.check_flags('-g -DDEBUG'): v['CXXFLAGS_DEBUG'] = 
['-g', '-DDEBUG'] if conf.check_flags('-g3 -O0 -DDEBUG'): v['CXXFLAGS_ULTRADEBUG'] = ['-g3', '-O0', '-DDEBUG'] try: debug_level = Params.g_options.debug_level.upper() except AttributeError: debug_level = ccroot.DEBUG_LEVELS.CUSTOM v.append_value('CXXFLAGS', v['CXXFLAGS_'+debug_level]) def detect(conf): find_cxx(conf) ar.find_cpp(conf) ar.find_ar(conf) conf.check_tool('cxx') common_flags(conf) conf.check_tool('checks') conf.check_features(kind='cpp') modifier_debug(conf) conf.add_os_flags('CXXFLAGS') conf.add_os_flags('CPPFLAGS') conf.add_os_flags('LINKFLAGS') def set_options(opt): try: opt.add_option('-d', '--debug-level', action = 'store', default = ccroot.DEBUG_LEVELS.RELEASE, help = "Specify the debug level, does nothing if CFLAGS is set in the environment. [Allowed Values: '%s']" % "', '".join(ccroot.DEBUG_LEVELS.ALL), choices = ccroot.DEBUG_LEVELS.ALL, dest = 'debug_level') except optparse.OptionConflictError: # the suncc tool might have added that option already pass lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/java.py0000664000175000017500000000710010772264157022263 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2006 (ita) "Java support" import os import Object, Action, Utils, Params class java_taskgen(Object.task_gen): s_default_ext = ['.java'] def __init__(self): Object.task_gen.__init__(self) self.jarname = '' self.jaropts = '' self.classpath = '' # Jar manifest attributes # TODO: Add manifest creation self.jar_mf_attributes = {} self.jar_mf_classpath = [] def apply(self): nodes_lst = [] if not self.classpath: if not self.env['CLASSPATH']: self.env['CLASSPATH'] = '..' + os.pathsep + '.' 
else: self.env['CLASSPATH'] = self.classpath find_source_lst = self.path.find_source_lst # first create the nodes corresponding to the sources for filename in self.to_list(self.source): node = find_source_lst(Utils.split_path(filename)) base, ext = os.path.splitext(filename) #node = self.path.find_build(filename) if not ext in self.s_default_ext: fatal("unknown file "+filename) task = self.create_task('javac', self.env) task.set_inputs(node) task.set_outputs(node.change_ext('.class')) nodes_lst.append(task.m_outputs[0]) if self.jarname: task = self.create_task('jar_create', self.env) task.set_inputs(nodes_lst) task.set_outputs(self.path.find_build_lst(Utils.split_path(self.jarname))) if not self.env['JAROPTS']: if self.jaropts: self.env['JAROPTS'] = self.jaropts else: self.env.append_unique('JAROPTS', '-C %s .' % self.path.bldpath(self.env)) Action.simple_action('javac', '${JAVAC} -classpath ${CLASSPATH} -d ${TGT[0].variant(env)} ${SRC}', color='BLUE', prio=10) Action.simple_action('jar_create', '${JAR} cvf ${TGT} ${JAROPTS}', color='GREEN', prio=50) def detect(conf): # If JAVA_PATH is set, we prepend it to the path list java_path = os.environ['PATH'].split(os.pathsep) if os.environ.has_key('JAVA_HOME'): java_path = [os.path.join(os.environ['JAVA_HOME'], 'bin')] + java_path conf.env['JAVA_HOME'] = os.environ['JAVA_HOME'] conf.find_program('javac', var='JAVAC', path_list=java_path) conf.find_program('java', var='JAVA', path_list=java_path) conf.find_program('jar', var='JAR', path_list=java_path) conf.env['JAVA_EXT'] = ['.java'] if os.environ.has_key('CLASSPATH'): conf.env['CLASSPATH'] = os.environ['CLASSPATH'] conf.hook(check_java_class) def check_java_class(conf, classname, with_classpath=None): """ Check if specified java class is installed. 
""" class_check_source = """ public class Test { public static void main(String[] argv) { Class lib; if (argv.length < 1) { System.err.println("Missing argument"); System.exit(77); } try { lib = Class.forName(argv[0]); } catch (ClassNotFoundException e) { System.err.println("ClassNotFoundException"); System.exit(1); } lib = null; System.exit(0); } } """ import shutil javatestdir = '.waf-javatest' classpath = javatestdir if conf.env['CLASSPATH']: classpath += os.pathsep + conf.env['CLASSPATH'] if isinstance(with_classpath, str): classpath += os.pathsep + with_classpath shutil.rmtree(javatestdir, True) os.mkdir(javatestdir) java_file = open(os.path.join(javatestdir, 'Test.java'), 'w') java_file.write(class_check_source) java_file.close() # Compile the source os.popen(conf.env['JAVAC'] + ' ' + os.path.join(javatestdir, 'Test.java')) (jstdin, jstdout, jstderr) = os.popen3(conf.env['JAVA'] + ' -cp ' + classpath + ' Test ' + classname) found = not bool(jstderr.read()) conf.check_message('Java class %s' % classname, "", found) shutil.rmtree(javatestdir, True) return found lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/batched_cc.py0000664000175000017500000000665010771537440023407 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2006 (ita) """ Batched builds - compile faster instead of compiling object files one by one, c/c++ compilers are often able to compile at once: cc -c ../file1.c ../file2.c ../file3.c Files are output on the directory where the compiler is called, and dependencies are more difficult to track (do not run the command on all source files if only one file changes) As such, we do as if the files were compiled one by one, but no command is actually run: replace each cc/cpp Task by a TaskSlave A new task called TaskMaster collects the signatures from each slave and finds out the command-line to run. 
To set this up, the method ccroot::create_task is replaced by a new version, to enable batched builds it is only necessary to import this module in the configuration (no other change required) """ EXT_C = ['.c', '.cc', '.cpp', '.cxx'] import shutil, os import Action, Object, Task, ccroot, Params from Object import extension class TaskMaster(Task.Task): def __init__(self, action_name, env, priority=92, normal=1, master=None): Task.Task.__init__(self, action_name, env, prio=priority, normal=normal) self.slaves=[] self.m_inputs2=[] self.m_outputs2=[] def add_slave(self, slave): self.slaves.append(slave) self.set_run_after(slave) def may_start(self): for t in self.m_run_after: if not t.m_hasrun: return 0 for t in self.slaves: self.m_inputs.append(t.m_inputs[0]) self.m_outputs.append(t.m_outputs[0]) if t.m_must_run: self.m_inputs2.append(t.m_inputs[0]) self.m_outputs2.append(t.m_outputs[0]) return 1 def run(self): tmpinputs = self.m_inputs self.m_inputs = self.m_inputs2 tmpoutputs = self.m_outputs self.m_outputs = self.m_outputs2 ret = self.m_action.run(self) env = self.env() rootdir = Params.g_build.m_srcnode.abspath(env) # unfortunately building the files in batch mode outputs them in the current folder (the build dir) # now move the files from the top of the builddir to the correct location for i in self.m_outputs: name = i.m_name if name[-1] == "s": name = name[:-1] # extension for shlib is .os, remove the s shutil.move(name, i.bldpath(env)) self.m_inputs = tmpinputs self.m_outputs = tmpoutputs return ret class TaskSlave(Task.Task): def __init__(self, action_name, env, priority=90, normal=1, master=None): Task.Task.__init__(self, action_name, env, priority, normal) self.m_master = master def get_display(self): return "* skipping "+ self.m_inputs[0].m_name def update_stat(self): self.m_executed=1 def must_run(self): self.m_must_run = Task.Task.must_run(self) return self.m_must_run def run(self): return 0 def can_retrieve_cache(self, sig): return None def 
create_task_cxx_new(self, node): try: mm = self.mastertask except AttributeError: mm = TaskMaster("all_"+self.m_type_initials, self.env) self.mastertask = mm task = TaskSlave(self.m_type_initials, self.env, 40, master=mm) self.m_tasks.append(task) mm.add_slave(task) task.set_inputs(node) task.set_outputs(node.change_ext('.o')) self.compiled_tasks.append(task) cc_str = '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} -c ${SRC}' Action.simple_action('all_cc', cc_str, 'GREEN') cpp_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${_CXXINCFLAGS} ${_CXXDEFFLAGS} -c ${SRC}' Action.simple_action('all_cpp', cpp_str, color='GREEN') extension(EXT_C)(create_task_cxx_new) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/dmd.py0000664000175000017500000000324010771525013022075 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # Carlos Rafael Giani, 2007 (dv) # Thomas Nagy, 2008 (ita) import sys import ar def find_dmd(conf): v = conf.env d_compiler = None if v['D_COMPILER']: d_compiler = v['D_COMPILER'] if not d_compiler: d_compiler = conf.find_program('dmd', var='D_COMPILER') if not d_compiler: return 0 v['D_COMPILER'] = d_compiler def common_flags(conf): v = conf.env # _DFLAGS _DIMPORTFLAGS _DLIBDIRFLAGS _DLIBFLAGS # Compiler is dmd so 'gdc' part will be ignored, just # ensure key is there, so wscript can append flags to it v['DFLAGS'] = {'gdc': [], 'dmd': ['-version=Posix']} v['D_SRC_F'] = '' v['D_TGT_F'] = '-c -of' v['DPATH_ST'] = '-I%s' # template for adding import paths # linker v['D_LINKER'] = v['D_COMPILER'] v['DLNK_SRC_F'] = '' v['DLNK_TGT_F'] = '-of' v['DLIB_ST'] = '-L-l%s' # template for adding libs v['DLIBPATH_ST'] = '-L-L%s' # template for adding libpaths # linker debug levels v['DFLAGS_OPTIMIZED'] = ['-O'] v['DFLAGS_DEBUG'] = ['-g', '-debug'] v['DFLAGS_ULTRADEBUG'] = ['-g', '-debug'] v['DLINKFLAGS'] = ['-quiet'] v['D_shlib_DFLAGS'] = [] v['D_shlib_LINKFLAGS'] = [] if sys.platform == "win32": 
v['D_program_PATTERN'] = '%s.exe' v['D_shlib_PATTERN'] = 'lib%s.dll' v['D_staticlib_PATTERN'] = 'lib%s.a' else: v['D_program_PATTERN'] = '%s' v['D_shlib_PATTERN'] = 'lib%s.so' v['D_staticlib_PATTERN'] = 'lib%s.a' def detect(conf): v = conf.env find_dmd(conf) ar.find_ar(conf) conf.check_tool('d') common_flags(conf) def set_options(opt): pass lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/ar.py0000664000175000017500000000207310771525013021736 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2006-2008 (ita) # Ralf Habacker, 2006 (rh) "ar and ranlib" import os, sys import Action ar_str = '${AR} ${ARFLAGS} ${TGT} ${SRC} && ${RANLIB} ${RANLIBFLAGS} ${TGT}' # FIXME if sys.platform == "win32": ar_str = '${AR} s${ARFLAGS} ${TGT} ${SRC}' Action.simple_action('ar_link_static', ar_str, color='YELLOW', prio=101) def detect(conf): comp = conf.find_program('ar', var='AR') if not comp: return ranlib = conf.find_program('ranlib', var='RANLIB') if not ranlib: return v = conf.env v['AR'] = comp v['ARFLAGS'] = 'r' v['RANLIB'] = ranlib v['RANLIBFLAGS'] = '' def find_ar(conf): v = conf.env conf.check_tool('ar') if not v['AR']: conf.fatal('ar is required for static libraries - not found') def find_cpp(conf): v = conf.env cpp = None if v['CPP']: cpp = v['CPP'] elif 'CPP' in os.environ: cpp = os.environ['CPP'] if not cpp: cpp = conf.find_program('cpp', var='CPP') if not cpp: cpp = v['CC'] if not cpp: cpp = v['CXX'] v['CPP'] = cpp lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/tex.py0000664000175000017500000001574110772264157022154 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! 
/usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2006 (ita) "TeX/LaTeX/PDFLaTeX support" import os, re import Utils, Params, Action, Object, Runner, Scan from Params import error, warning, debug, fatal re_tex = re.compile(r'\\(?Pinclude|import|bringin){(?P[^{}]*)}', re.M) class tex_scanner(Scan.scanner): def __init__(self): Scan.scanner.__init__(self) def scan(self, task, node): env = task.env() nodes = [] names = [] if not node: return (nodes, names) fi = open(node.abspath(env), 'r') code = fi.read() fi.close() curdirnode = task.curdirnode abs = curdirnode.abspath() for match in re_tex.finditer(code): path = match.group('file') if path: for k in ['', '.tex', '.ltx']: # add another loop for the tex include paths? debug("trying %s%s" % (path, k), 'tex') try: os.stat(abs+os.sep+path+k) except OSError: continue found = path+k node = curdirnode.find_source(found) if node: nodes.append(node) else: debug('could not find %s' % path, 'tex') names.append(path) debug("found the following : %s and names %s" % (nodes, names), 'tex') return (nodes, names) g_tex_scanner = tex_scanner() g_bibtex_re = re.compile('bibdata', re.M) def tex_build(task, command='LATEX'): env = task.env() if env['PROMPT_LATEX']: exec_cmd = Runner.exec_command_interact com = '%s %s' % (env[command], env.get_flat(command+'FLAGS')) else: exec_cmd = Runner.exec_command com = '%s %s %s' % (env[command], env.get_flat(command+'FLAGS'), '-interaction=batchmode') node = task.m_inputs[0] reldir = node.bld_dir(env) srcfile = node.srcpath(env) lst = [] for c in Utils.split_path(reldir): if c: lst.append('..') sr = os.path.join(*(lst + [srcfile])) sr2 = os.path.join(*(lst + [node.m_parent.srcpath(env)])) aux_node = node.change_ext('.aux') idx_node = node.change_ext('.idx') hash = '' old_hash = '' nm = aux_node.m_name docuname = nm[ : len(nm) - 4 ] # 4 is the size of ".aux" latex_compile_cmd = 'cd %s && TEXINPUTS=%s:$TEXINPUTS %s %s' % (reldir, sr2, com, sr) warning('first pass on %s' % command) ret = 
exec_cmd(latex_compile_cmd) if ret: return ret # look in the .aux file if there is a bibfile to process try: file = open(aux_node.abspath(env), 'r') ct = file.read() file.close() except (OSError, IOError): error('erreur bibtex scan') else: fo = g_bibtex_re.findall(ct) # yes, there is a .aux file to process if fo: bibtex_compile_cmd = 'cd %s && BIBINPUTS=%s:$BIBINPUTS %s %s' % (reldir, sr2, env['BIBTEX'], docuname) warning('calling bibtex') ret = exec_cmd(bibtex_compile_cmd) if ret: error('error when calling bibtex %s' % bibtex_compile_cmd) return ret # look on the filesystem if there is a .idx file to process try: idx_path = idx_node.abspath(env) os.stat(idx_path) except OSError: error('erreur file.idx scan') else: makeindex_compile_cmd = 'cd %s && %s %s' % (reldir, env['MAKEINDEX'], idx_path) warning('calling makeindex') ret = exec_cmd(makeindex_compile_cmd) if ret: error('error when calling makeindex %s' % makeindex_compile_cmd) return ret i = 0 while i < 10: # prevent against infinite loops - one never knows i += 1 # watch the contents of file.aux old_hash = hash try: hash = Params.h_file(aux_node.abspath(env)) except KeyError: error('could not read aux.h -> %s' % aux_node.abspath(env)) pass # debug #print "hash is, ", hash, " ", old_hash # stop if file.aux does not change anymore if hash and hash == old_hash: break # run the command warning('calling %s' % command) ret = exec_cmd(latex_compile_cmd) if ret: error('error when calling %s %s' % (command, latex_compile_cmd)) return ret # 0 means no error return 0 latex_vardeps = ['LATEX', 'LATEXFLAGS'] def latex_build(task): return tex_build(task, 'LATEX') pdflatex_vardeps = ['PDFLATEX', 'PDFLATEXFLAGS'] def pdflatex_build(task): return tex_build(task, 'PDFLATEX') g_texobjs = ['latex','pdflatex'] class tex_taskgen(Object.task_gen): s_default_ext = ['.tex', '.ltx'] def __init__(self, type='latex'): Object.task_gen.__init__(self) global g_texobjs if not type in g_texobjs: fatal('type %s not supported for texobj' % 
type) self.m_type = type self.outs = '' # example: "ps pdf" self.prompt = 1 # prompt for incomplete files (else the batchmode is used) self.deps = '' def apply(self): tree = Params.g_build outs = self.outs.split() self.env['PROMPT_LATEX'] = self.prompt deps_lst = [] if self.deps: deps = self.to_list(self.deps) for filename in deps: n = self.path.find_source(filename) if not n in deps_lst: deps_lst.append(n) for filename in self.source.split(): base, ext = os.path.splitext(filename) if not ext in self.s_default_ext: continue node = self.path.find_source(filename) if not node: fatal('cannot find %s' % filename) if self.m_type == 'latex': task = self.create_task('latex', self.env) task.set_inputs(node) task.set_outputs(node.change_ext('.dvi')) elif self.m_type == 'pdflatex': task = self.create_task('pdflatex', self.env) task.set_inputs(node) task.set_outputs(node.change_ext('.pdf')) else: fatal('no type or invalid type given in tex object (should be latex or pdflatex)') task.m_scanner = g_tex_scanner task.m_env = self.env task.curdirnode = self.path # add the manual dependencies if deps_lst: variant = node.variant(self.env) try: lst = tree.m_depends_on[variant][node] for n in deps_lst: if not n in lst: lst.append(n) except KeyError: tree.m_depends_on[variant][node] = deps_lst if self.m_type == 'latex': if 'ps' in outs: pstask = self.create_task('dvips', self.env) pstask.set_inputs(task.m_outputs) pstask.set_outputs(node.change_ext('.ps')) if 'pdf' in outs: pdftask = self.create_task('dvipdf', self.env) pdftask.set_inputs(task.m_outputs) pdftask.set_outputs(node.change_ext('.pdf')) elif self.m_type == 'pdflatex': if 'ps' in outs: pstask = self.create_task('pdf2ps', self.env) pstask.set_inputs(task.m_outputs) pstask.set_outputs(node.change_ext('.ps')) def detect(conf): v = conf.env for p in 'tex latex pdflatex bibtex dvips dvipdf ps2pdf makeindex'.split(): conf.find_program(p, var=p.upper()) v[p.upper()+'FLAGS'] = '' v['DVIPSFLAGS'] = '-Ppdf' Action.simple_action('tex', 
'${TEX} ${TEXFLAGS} ${SRC}', color='BLUE', prio=60) Action.simple_action('bibtex', '${BIBTEX} ${BIBTEXFLAGS} ${SRC}', color='BLUE', prio=60) Action.simple_action('dvips', '${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}', color='BLUE', prio=60) Action.simple_action('dvipdf', '${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}', color='BLUE', prio=60) Action.simple_action('pdf2ps', '${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}', color='BLUE', prio=60) Action.Action('latex', vars=latex_vardeps, func=latex_build, prio=40) Action.Action('pdflatex', vars=pdflatex_vardeps, func=pdflatex_build, prio=40) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/compiler_d.py0000664000175000017500000000177110771525013023455 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # Carlos Rafael Giani, 2007 (dv) import os, sys, imp, types import Utils, Action, Params, checks, Configure def detect(conf): if getattr(Params.g_options, 'check_dmd_first', None): test_for_compiler = ['dmd', 'gdc'] else: test_for_compiler = ['gdc', 'dmd'] for d_compiler in test_for_compiler: conf.check_tool(d_compiler) if conf.env['D_COMPILER']: conf.check_message("%s" % d_compiler, '', True) conf.env["COMPILER_D"] = d_compiler return conf.check_message("%s" % d_compiler, '', False) def set_options(opt): d_compiler_opts = opt.add_option_group("D Compiler Options") try: d_compiler_opts.add_option('--check-dmd-first', action = "store_true", help = 'checks for the gdc compiler before dmd (default is the other way round)', dest = 'check_dmd_first',default = False) except Exception: pass for d_compiler in ['gdc', 'dmd']: opt.tool_options('%s' % d_compiler, option_group=d_compiler_opts) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/python.py0000664000175000017500000002373110772264157022673 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! 
def _get_python_variables(python_exe, variables, imports=['import sys']):
	"""Run *python_exe* and evaluate each expression in *variables*.

	A small program is generated that prints repr() of every expression;
	its output is parsed back into python values.  Recognized reprs are
	None, single-quoted strings (returned unquoted) and non-negative
	integers; the scan stops at the first line that matches none of
	these.  Raises ValueError when the helper interpreter fails.
	"""
	# NOTE(review): the mutable default 'imports' is only read (copied via
	# list() below), never mutated, so the shared-default pitfall does not
	# apply here.
	program = list(imports)
	program.append('')  # blank line separating the imports from the prints
	for v in variables:
		# 'print repr(...)' statement syntax: the target interpreter is python 2
		program.append("print repr(%s)" % v)
	proc = subprocess.Popen([python_exe, "-c", '\n'.join(program)], stdout=subprocess.PIPE)
	output = proc.communicate()[0].split("\n")
	if proc.returncode:
		if Params.g_verbose:
			Params.warning("Python program to extract python configuration variables failed:\n%s" % '\n'.join(["line %03i: %s" % (lineno+1, line) for lineno, line in enumerate(program)]))
		raise ValueError
	return_values = []
	for s in output:
		s = s.strip()
		if not s:
			continue
		if s == 'None':
			return_values.append(None)
		elif s[0] == "'" and s[-1] == "'":
			# repr() of a string: strip the surrounding quotes
			return_values.append(s[1:-1])
		elif s[0].isdigit():
			return_values.append(int(s))
		else: break  # first unrecognized line ends the scan
	return return_values
"""Check for headers and libraries necessary to extend or embed python. If successful, xxx_PYEXT and xxx_PYEMBED variables are defined in the enviroment (for uselib). PYEXT should be used for compiling python extensions, while PYEMBED should be used by programs that need to embed a python interpreter. Note: this test requires that check_python_version was previously executed and successful.""" try: import distutils except ImportError: return 0 env = conf.env python = env['PYTHON'] assert python, ("python is %r !" % (python,)) try: # Get some python configuration variables using distutils v = 'prefix SO SYSLIBS SHLIBS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED'.split() (python_prefix, python_SO, python_SYSLIBS, python_SHLIBS, python_LIBDIR, python_LIBPL, INCLUDEPY, Py_ENABLE_SHARED) = \ _get_python_variables(python, ["get_config_var('%s')" % x for x in v], ['from distutils.sysconfig import get_config_var']) except ValueError: conf.fatal("Python development headers not found (-v for details).") env['pyext_PATTERN'] = '%s'+python_SO # Check for python libraries for embedding if python_SYSLIBS is not None: for lib in python_SYSLIBS.split(): if lib.startswith('-l'): lib = lib[2:] # strip '-l' env.append_value('LIB_PYEMBED', lib) if python_SHLIBS is not None: for lib in python_SHLIBS.split(): if lib.startswith('-l'): lib = lib[2:] # strip '-l' env.append_value('LIB_PYEMBED', lib) lib = conf.create_library_configurator() lib.name = 'python' + env['PYTHON_VERSION'] lib.uselib = 'PYTHON' lib.code = ''' #ifdef __cplusplus extern "C" { #endif void Py_Initialize(void); void Py_Finalize(void); #ifdef __cplusplus } #endif int main(int argc, char *argv[]) { Py_Initialize(); Py_Finalize(); return 0; } ''' if python_LIBDIR is not None: lib.path = [python_LIBDIR] result = lib.run() else: result = 0 ## try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib) if not result: if python_LIBPL is not None: lib.path = [python_LIBPL] result = lib.run() 
else: result = 0 ## try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32) if not result: lib.path = [os.path.join(python_prefix, "libs")] lib.name = 'python' + env['PYTHON_VERSION'].replace('.', '') result = lib.run() if result: env['LIBPATH_PYEMBED'] = lib.path env.append_value('LIB_PYEMBED', lib.name) # according to # distutils.command.build_ext.build_ext.get_libraries.__doc__ # this might want to be OS/2 aswell. if sys.platform == 'win32' or (Py_ENABLE_SHARED is not None and sys.platform != 'darwin'): env['LIBPATH_PYEXT'] = env['LIBPATH_PYEMBED'] env['LIB_PYEXT'] = env['LIB_PYEMBED'] # We check that pythonX.Y-config exists, and if it exists we # use it to get only the includes, else fall back to distutils. python_config = conf.find_program( 'python%s-config' % ('.'.join(env['PYTHON_VERSION'].split('.')[:2])), var='PYTHON_CONFIG') if python_config: includes = [] for incstr in os.popen("%s %s --includes" % (python, python_config)).readline().strip().split(): # strip the -I or /I if (incstr.startswith('-I') or incstr.startswith('/I')): incstr = incstr[2:] # append include path, unless already given if incstr not in includes: includes.append(incstr) env['CPPPATH_PYEXT'] = list(includes) env['CPPPATH_PYEMBED'] = list(includes) else: env['CPPPATH_PYEXT'] = [INCLUDEPY] env['CPPPATH_PYEMBED'] = [INCLUDEPY] # Code using the Python API needs to be compiled with -fno-strict-aliasing if env['CC']: version = os.popen("%s --version" % env['CC']).readline() if '(GCC)' in version: env.append_value('CCFLAGS_PYEMBED', '-fno-strict-aliasing') env.append_value('CCFLAGS_PYEXT', '-fno-strict-aliasing') if env['CXX']: version = os.popen("%s --version" % env['CXX']).readline() if '(GCC)' in version: env.append_value('CXXFLAGS_PYEMBED', '-fno-strict-aliasing') env.append_value('CXXFLAGS_PYEXT', '-fno-strict-aliasing') # Test to see if it compiles header = conf.create_header_configurator() header.name = 'Python.h' header.define = 'HAVE_PYTHON_H' header.uselib = 
'PYEXT' header.code = "#include \nint main(int argc, char *argv[]) { Py_Initialize(); Py_Finalize(); return 0; }" result = header.run() if not result: conf.fatal("Python development headers not found.") def check_python_version(conf, minver=None): """ Check if the python interpreter is found matching a given minimum version. minver should be a tuple, eg. to check for python >= 2.4.2 pass (2,4,2) as minver. If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR' (eg. '2.4') of the actual python version found, and PYTHONDIR is defined, pointing to the site-packages directory appropriate for this python version, where modules/packages/extensions should be installed. """ assert minver is None or isinstance(minver, tuple) python = conf.env['PYTHON'] assert python, ("python is %r !" % (python,)) # Get python version string cmd = [python, "-c", "import sys\nfor x in sys.version_info: print str(x)"] Params.debug("Running python command %r" % cmd, 'python') proc = subprocess.Popen(cmd, stdout=subprocess.PIPE) lines = proc.communicate()[0].split() assert len(lines) == 5, "found %i lines, expected 5: %r" % (len(lines), lines) pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4])) # compare python version with the minimum required result = (minver is None) or (pyver_tuple >= minver) if result: # define useful environment variables pyver = '.'.join([str(x) for x in pyver_tuple[:2]]) conf.env['PYTHON_VERSION'] = pyver if 'PYTHONDIR' in os.environ: pydir = os.environ['PYTHONDIR'] else: if sys.platform == 'win32': (python_LIBDEST,) = \ _get_python_variables(python, ["get_config_var('LIBDEST')"], ['from distutils.sysconfig import get_config_var']) else: python_LIBDEST = None if python_LIBDEST is None: if conf.env['LIBDIR']: python_LIBDEST = os.path.join(conf.env['LIBDIR'], "python" + pyver) else: python_LIBDEST = os.path.join(conf.env['PREFIX'], "lib", "python" + pyver) pydir = os.path.join(python_LIBDEST, "site-packages") conf.define('PYTHONDIR', 
def check_python_module(conf, module_name):
	"""Verify that the configured python interpreter can import *module_name*.

	Reports the result via conf.check_message and aborts the
	configuration with conf.fatal when the import fails.
	"""
	probe = [conf.env['PYTHON'], "-c", "import %s" % module_name]
	proc = subprocess.Popen(probe, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
	result = proc.wait() == 0
	conf.check_message('Python module', module_name, result)
	if not result:
		conf.fatal("Python module not found.")
/usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2006-2008 (ita) #C/C++ preprocessor for finding dependencies #TODO: more varargs, pragma once import re, sys, os, string, types if __name__ == '__main__': sys.path = ['.', '..'] + sys.path import Params from Params import debug, error, warning import traceback class PreprocError(Exception): pass g_findall = 1 'search harder for project includes' use_trigraphs = 0 'apply the trigraph rules first' strict_quotes = 0 "Keep <> for system includes (do not search for those includes)" g_optrans = { 'not':'!', 'and':'&&', 'bitand':'&', 'and_eq':'&=', 'or':'||', 'bitor':'|', 'or_eq':'|=', 'xor':'^', 'xor_eq':'^=', 'compl':'~', } "these ops are for c++, to reset, set an empty dict" # ignore #warning and #error re_lines = re.compile(\ '^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$', re.IGNORECASE | re.MULTILINE) re_mac = re.compile("^[a-zA-Z_]\w*") re_fun = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]') re_pragma_once = re.compile('^\s*once\s*', re.IGNORECASE) re_nl = re.compile('\\\\\r*\n', re.MULTILINE) re_cpp = re.compile(\ r"""(/\*[^*]*\*+([^/*][^*]*\*+)*/)|//[^\n]*|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^/"'\\]*)""", re.MULTILINE) trig_def = [('??'+a, b) for a, b in zip("=-/!'()<>", r'#~\|^[]{}')] chr_esc = {'0':0, 'a':7, 'b':8, 't':9, 'n':10, 'f':11, 'v':12, 'r':13, '\\':92, "'":39} NUM = 'i' OP = 'O' IDENT = 'T' STR = 's' CHAR = 'c' tok_types = [NUM, STR, IDENT, OP] exp_types = [ r"""0[xX](?P[a-fA-F0-9]+)(?P[uUlL]*)|L*?'(?P(\\.|[^\\'])+)'|(?P\d+)[Ee](?P[+-]*?\d+)(?P[fFlL]*)|(?P\d*\.\d+)([Ee](?P[+-]*?\d+))?(?P[fFlL]*)|(?P\d+\.\d*)([Ee](?P[+-]*?\d+))?(?P[fFlL]*)|(?P0*)(?P\d+)(?P[uUlL]*)""", r'L?"([^"\\]|\\.)*"', r'[a-zA-Z_]\w*', r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]', ] re_clexer = re.compile('|'.join(["(?P<%s>%s)" % (name, part) for name, part in zip(tok_types, 
exp_types)]), re.M) accepted = 'a' ignored = 'i' undefined = 'u' skipped = 's' def repl(m): s = m.group(1) if s is not None: return ' ' s = m.group(3) if s is None: return '' return s def filter_comments(filename): # return a list of tuples : keyword, line f = open(filename, "r") code = f.read() f.close() if use_trigraphs: for (a, b) in trig_def: code = code.split(a).join(b) code = re_nl.sub('', code) code = re_cpp.sub(repl, code) return [(m.group(2), m.group(3)) for m in re.finditer(re_lines, code)] prec = {} # op -> number, needed for such expressions: #if 1 && 2 != 0 ops = ['* / %', '+ -', '<< >>', '< <= >= >', '== !=', '& | ^', '&& ||', ','] for x in range(len(ops)): syms = ops[x] for u in syms.split(): prec[u] = x def reduce_nums(val_1, val_2, val_op): #print val_1, val_2, val_op # pass two values, return a value # now perform the operation, make certain a and b are numeric try: a = 0 + val_1 except TypeError: a = int(val_1) try: b = 0 + val_2 except TypeError: b = int(val_2) d = val_op if d == '%': c = a%b elif d=='+': c = a+b elif d=='-': c = a-b elif d=='*': c = a*b elif d=='/': c = a/b elif d=='^': c = a^b elif d=='|': c = a|b elif d=='||': c = int(a or b) elif d=='&': c = a&b elif d=='&&': c = int(a and b) elif d=='==': c = int(a == b) elif d=='!=': c = int(a != b) elif d=='<=': c = int(a <= b) elif d=='<': c = int(a < b) elif d=='>': c = int(a > b) elif d=='>=': c = int(a >= b) elif d=='^': c = int(a^b) elif d=='<<': c = a<>': c = a>>b else: c = 0 return c def get_expr(lst, defs, ban): if not lst: return ([], [], []) (p, v) = lst[0] if p == NUM: return (p, v, lst[1:]) elif p == STR: try: (p2, v2) = lst[1] if p2 == STR: return (p, v+v2, lst[2:]) except IndexError: pass return (p, v, lst[1:]) elif p == OP: if v in ['+', '-', '!', '~', '#']: (p2, v2, lst2) = get_expr(lst[1:], defs, ban) if v == '#': if p2 != IDENT: raise PreprocError, "ident expected %s" % str(lst) return get_expr([(STR, v2)]+lst2, defs, ban) if p2 != NUM: raise PreprocError, "num expected 
%s" % str(lst) if v == '+': return (p2, v2, lst2) elif v == '-': return (p2, - int(v2), lst2) elif v == '!': return (p2, int(not int(v2)), lst2) elif v == '~': return (p2, ~ int(v2), lst2) return (p2, v2, lst2) elif v == '(': count_par = 0 i = 0 for _, v in lst: if v == ')': count_par -= 1 if count_par == 0: break elif v == '(': count_par += 1 i += 1 else: raise PreprocError, "rparen expected %s" % str(lst) ret = process_tokens(lst[1:i], defs, ban) if len(ret) == 1: (p, v) = ret[0] return (p, v, lst[i+1:]) else: #return (None, lst1, lst[i+1:]) raise PreprocError, "cannot reduce %s" % str(lst) elif p == IDENT: if len(lst)>1: (p2, v2) = lst[1] if v2 == "##": # token pasting, reevaluate the identifier obtained (p3, v3) = lst[2] if p3 != IDENT and p3 != NUM and p3 != OP: raise PreprocError, "%s: ident expected after '##'" % str(lst) return get_expr([(p, v+v3)]+lst[3:], defs, ban) if v.lower() == 'defined': (p2, v2) = lst[1] off = 2 if v2 == '(': (p3, v3) = lst[2] if p3 != IDENT: raise PreprocError, 'expected an identifier after a "defined("' (p2, v2) = lst[3] if v2 != ')': raise PreprocError, 'expected a ")" after a "defined(x"' off = 4 elif p2 != IDENT: raise PreprocError, 'expected a "(" or an identifier after a defined' x = 0 if v2 in defs: x = 1 #return get_expr([(NUM, x)] + lst[off:], defs, ban) return (NUM, x, lst[off:]) elif not v in defs or v in ban: if "waf_include" in ban: return (p, v, lst[1:]) else: return (NUM, 0, lst[1:]) # tokenize on demand if type(defs[v]) is types.StringType: v, k = extract_macro(defs[v]) defs[v] = k macro_def = defs[v] if not macro_def[0]: # simple macro, substitute, and reevaluate lst = macro_def[1] + lst[1:] return get_expr(lst, defs, ban) else: # collect the arguments for the funcall params = [] i = 1 p2, v2 = lst[i] if p2 != OP or v2 != '(': raise PreprocError, "invalid function call '%s'" % v one_param = [] count_paren = 0 try: while 1: i += 1 p2, v2 = lst[i] if p2 == OP and count_paren == 0: if v2 == '(': one_param.append((p2, 
v2)) count_paren += 1 elif v2 == ')': if one_param: params.append(one_param) lst = lst[i+1:] break elif v2 == ',': if not one_param: raise PreprocError, "empty param in funcall %s" % p params.append(one_param) one_param = [] else: one_param.append((p2, v2)) else: one_param.append((p2, v2)) if v2 == '(': count_paren += 1 elif v2 == ')': count_paren -= 1 except IndexError, e: #raise PreprocError, 'invalid function call %s: missing ")"' % p raise # substitute the arguments within the define expression accu = [] table = macro_def[0] for p2, v2 in macro_def[1]: if p2 == IDENT and v2 in table: accu += params[table[v2]] else: if v2 == '__VA_ARGS__': # first collect the tokens va_toks = [] st = len(macro_def[0]) pt = len(params) for x in params[pt-st+1:]: va_toks.extend(x) va_toks.append((OP, ',')) if va_toks: va_toks.pop() # extra comma if len(accu)>1: (p3, v3) = accu[-1] (p4, v4) = accu[-2] if v3 == '##': # remove the token paste accu.pop() if v4 == ',' and pt < st: # remove the comma accu.pop() accu += va_toks else: accu.append((p2, v2)) return get_expr(accu + lst, defs, ban+[v]) def process_tokens(lst, defs, ban): accu = [] while lst: p, v, nlst = get_expr(lst, defs, ban) if p == NUM: if not nlst: return [(p, v)] # finished op1, ov1 = nlst[0] if op1 != OP: raise PreprocError, "op expected %s" % str(lst) if ov1 == '?': i = 0 count_par = 0 for _, k in nlst: if k == ')': count_par -= 1 elif k == '(': count_par += 1 elif k == ':' and count_par == 0: break i += 1 else: raise PreprocError, "ending ':' expected %s" % str(lst) if reduce_nums(v, 0, '+'): lst = nlst[1:i] else: lst = nlst[i+1:] continue elif ov1 == ',': lst = nlst[1:] continue p2, v2, nlst = get_expr(nlst[1:], defs, ban) if p2 != NUM: raise PreprocError, "num expected after op %s" % str(lst) if nlst: # op precedence op3, ov3 = nlst[0] if prec[ov3] < prec[ov1]: #print "ov3", ov3, ov1 # as needed p4, v4, nlst2 = get_expr(nlst[1:], defs, ban) v5 = reduce_nums(v2, v4, ov3) lst = [(p, v), (op1, ov1), (NUM, v5)] + 
def eval_macro(lst, adefs):
	"""Evaluate a preprocessor expression (an #if/#elif body) to a boolean.

	lst:   token list produced by tokenize()
	adefs: the macro definitions currently active
	"""
	# reduce the expression; a successful reduction ends in a single token
	ret = process_tokens(lst, adefs, [])
	if not ret: raise PreprocError, "missing tokens to evaluate %s" % str(lst)
	p, v = ret[0]
	# any non-zero numeric result means the condition holds
	return int(v) != 0
hash( (str(self.m_nodepaths), str(lst)) ) if not cache.get(key, None): cache[key] = 1 for n in self.m_nodepaths: node = try_exists(n, lst) if node: found = n.find_source(filename, create=0) if found: break if found: self.m_nodes.append(found) # Qt if filename[-4:] != '.moc': self.addlines(found.abspath(self.env)) if not found: if not filename in self.m_names: self.m_names.append(filename) else: found = 0 for p in self.strpaths: if not p in self.pathcontents.keys(): self.pathcontents[p] = os.listdir(p) if filename in self.pathcontents[p]: #print "file %s found in path %s" % (filename, p) np = os.path.join(p, filename) # screw Qt two times if filename[-4:] != '.moc': self.addlines(np) self.deps_paths.append(np) found = 1 if not found: pass #error("could not find %s " % filename) def addlines(self, filepath): pc = self.parse_cache debug("reading file %r" % filepath, 'preproc') if filepath in pc.keys(): self.lines = pc[filepath] + self.lines return try: lines = filter_comments(filepath) pc[filepath] = lines # memorize the lines filtered self.lines = lines + self.lines except IOError: raise PreprocError, "could not read the file %s" % filepath except Exception: if Params.g_verbose > 0: warning("parsing %s failed" % filepath) traceback.print_exc() def start(self, node, env): debug("scanning %s (in %s)" % (node.m_name, node.m_parent.m_name), 'preproc') self.env = env variant = node.variant(env) self.addlines(node.abspath(env)) if env['DEFLINES']: self.lines = [('define', x) for x in env['DEFLINES']] + self.lines while self.lines: (type, line) = self.lines.pop(0) try: self.process_line(type, line) except Exception, ex: if Params.g_verbose: warning("line parsing failed (%s): %s" % (str(ex), line)) traceback.print_exc() # debug only def start_local(self, filename): self.addlines(filename) #print self.lines while self.lines: (type, line) = self.lines.pop(0) try: self.process_line(type, line) except Exception, ex: if Params.g_verbose: warning("line parsing failed (%s): %s" % 
	def process_line(self, token, line):
		"""Process one preprocessor directive.

		token: the directive keyword (if/ifdef/include/define/...)
		line:  the text following the keyword

		self.state is a stack with one entry per open conditional block;
		each entry is one of accepted/ignored/skipped/undefined.
		"""
		ve = Params.g_verbose
		if ve: debug("line is %s - %s state is %s" % (token, line, self.state), 'preproc')
		state = self.state

		# make certain we define the state if we are about to enter in an if block
		if token in ['ifdef', 'ifndef', 'if']:
			state.append(undefined)
		elif token == 'endif':
			state.pop()

		# skip lines when in a dead 'if' branch, wait for the endif
		if not token in ['else', 'elif', 'endif']:
			if skipped in self.state or ignored in self.state:
				return

		if token == 'if':
			# evaluate the condition; the live branch becomes 'accepted'
			ret = eval_macro(tokenize(line), self.defs)
			if ret: state[-1] = accepted
			else: state[-1] = ignored
		elif token == 'ifdef':
			m = re_mac.search(line)
			if m and m.group(0) in self.defs: state[-1] = accepted
			else: state[-1] = ignored
		elif token == 'ifndef':
			m = re_mac.search(line)
			if m and m.group(0) in self.defs: state[-1] = ignored
			else: state[-1] = accepted
		elif token == 'include' or token == 'import':
			# NOTE: 'type' here shadows the builtin; kept as-is (code unchanged)
			(type, inc) = extract_include(line, self.defs)
			if inc in self.ban_includes: return
			# '#import' includes a file at most once
			if token == 'import': self.ban_includes.append(inc)
			if ve: debug("include found %s (%s) " % (inc, type), 'preproc')
			# angle-bracket (system) includes are only followed when
			# strict_quotes is off
			if type == '"' or not strict_quotes:
				if not inc in self.deps:
					self.deps.append(inc)
				self.tryfind(inc)
		elif token == 'elif':
			# a branch already taken turns the rest of the chain dead
			if state[-1] == accepted: state[-1] = skipped
			elif state[-1] == ignored:
				if eval_macro(tokenize(line), self.defs):
					state[-1] = accepted
		elif token == 'else':
			if state[-1] == accepted: state[-1] = skipped
			elif state[-1] == ignored: state[-1] = accepted
		elif token == 'define':
			m = re_mac.search(line)
			if m:
				name = m.group(0)
				if ve: debug("define %s %s" % (name, line), 'preproc')
				# store the raw line; it is tokenized lazily on first use
				self.defs[name] = line
			else:
				raise PreprocError, "invalid define line %s" % line
		elif token == 'undef':
			m = re_mac.search(line)
			if m and m.group(0) in self.defs:
				self.defs.__delitem__(m.group(0))
				#print "undef %s" % name
		elif token == 'pragma':
			# '#pragma once': never re-include the current file
			if re_pragma_once.search(line.lower()):
				self.ban_includes.append(self.curfile)
re_include = re.compile('^\s*(<(?P<a>.*)>|"(?P<b>.*)")')
def extract_include(txt, defs):
	"""Extract the target of an #include directive.

	txt:  the text following '#include'
	defs: the macro definitions currently active

	Returns a tuple (delimiter, path) where delimiter is '<' for system
	includes and '"' for quoted ones.  When the directive names a macro
	instead of a literal path, the text is tokenized and macro-expanded
	first; the expansion must reduce to a string token or PreprocError
	is raised.
	"""
	m = re_include.search(txt)
	if m:
		if m.group('a'): return '<', m.group('a')
		if m.group('b'): return '"', m.group('b')

	# not a literal include: preprocess the line, the result must be a string
	# BUG FIX: the original referenced an undefined name 'tokens' here
	# ("tokens = process_tokens(tokens, ...)"), raising NameError for any
	# computed include - the text must be tokenized first.
	tokens = process_tokens(tokenize(txt), defs, ['waf_include'])
	p, v = tokens[0]
	if p != STR: raise PreprocError("could not parse include %s" % txt)
	return ('"', v)
v='##' ret.append((name, v)) break return ret # quick test # if __name__ == "__main__": Params.g_verbose = 2 Params.g_zones = ['preproc'] class dum: def __init__(self): self.parse_cache = {} Params.g_build = dum() try: arg = sys.argv[1] except IndexError: arg = "file.c" paths = ['.'] f = open(arg, "r"); txt = f.read(); f.close() m1 = [[], [(NUM, 1), (OP, '+'), (NUM, 2)]] fun1 = [[(IDENT, 'x'), (IDENT, 'y')], [(IDENT, 'x'), (OP, '##'), (IDENT, 'y')]] fun2 = [[(IDENT, 'x'), (IDENT, 'y')], [(IDENT, 'x'), (OP, '*'), (IDENT, 'y')]] def test(x): y = process_tokens(tokenize(x), {'m1':m1, 'fun1':fun1, 'fun2':fun2}, []) #print x, y test("0&&2<3") test("(5>1)*6") test("1+2+((3+4)+5)+6==(6*7)/2==1*-1*-1") test("1,2,3*9,9") test("1?77:88") test("0?77:88") test("1?1,(0?5:9):3,4") test("defined inex") test("defined(inex)") test("m1*3") test("7*m1*3") test("fun1(m,1)") test("fun2(2, fun1(m, 1))") #test("foo##.##h") gruik = c_parser(strpaths = paths) gruik.start_local(arg) print "we have found the following dependencies" print gruik.deps print gruik.deps_paths #f = open(arg, "r") #txt = f.read() #f.close() #print tokenize(txt) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/perl.py0000664000175000017500000000743610771537440022315 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # andersg at 0x63.nu 2007 import os import pproc as subprocess import Action, Object, Node, Params from Object import extension xsubpp_str = '${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}' EXT_XS = ['.xs'] def xsubpp_file(self, node): gentask = self.create_task('xsubpp') gentask.set_inputs(node) gentask.set_outputs(node.change_ext('.c')) cctask = self.create_task('cc') cctask.set_inputs(gentask.m_outputs) cctask.set_outputs(node.change_ext('.o')) Action.simple_action('xsubpp', xsubpp_str, color='BLUE', prio=10) def check_perl_version(conf, minver=None): """ Checks if perl is installed. 
def check_perl_module(conf, module):
	"""Check whether the selected perl interpreter can load *module*.

	A minimum version may be appended to the module name, e.g.
	conf.check_perl_module("Some::Module 2.92").  Returns True when the
	'use' statement succeeds.
	"""
	use_stmt = 'use %s' % module
	# TODO are you certain ?
	rc = subprocess.call([conf.env['PERL'], '-e', use_stmt],
	                     stdout=subprocess.PIPE, stderr=subprocess.PIPE)
	ok = (rc == 0)
	conf.check_message("perl module %s" % module, "", ok)
	return ok
""" if not conf.env['PERL']: return False perl = conf.env['PERL'] conf.env["LINKFLAGS_PERLEXT"] = os.popen(perl + " -MConfig -e'print $Config{lddlflags}'").read() conf.env["CPPPATH_PERLEXT"] = os.popen(perl + " -MConfig -e'print \"$Config{archlib}/CORE\"'").read() conf.env["CCFLAGS_PERLEXT"] = os.popen(perl + " -MConfig -e'print \"$Config{ccflags} $Config{cccdlflags}\"'").read() conf.env["XSUBPP"] = os.popen(perl + " -MConfig -e'print \"$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}\"'").read() conf.env["EXTUTILS_TYPEMAP"] = os.popen(perl + " -MConfig -e'print \"$Config{privlib}/ExtUtils/typemap\"'").read() if not getattr(Params.g_options, 'perlarchdir', None): conf.env["ARCHDIR_PERL"] = os.popen(perl + " -MConfig -e'print $Config{sitearch}'").read() else: conf.env["ARCHDIR_PERL"] = getattr(Params.g_options, 'perlarchdir') conf.env["perlext_PATTERN"] = '%s.' + os.popen(perl + " -MConfig -e'print $Config{dlext}'").read() conf.env["perlext_USELIB"] = "PERL PERLEXT" return True def detect(conf): conf.hook(check_perl_version) conf.hook(check_perl_ext_devel) conf.hook(check_perl_module) def set_options(opt): opt.add_option("--with-perl-binary", type="string", dest="perlbinary", help = 'Specify alternate perl binary', default=None) opt.add_option("--with-perl-archdir", type="string", dest="perlarchdir", help = 'Specify directory where to install arch specific files', default=None) extension(EXT_XS)(xsubpp_file) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/nasm.py0000664000175000017500000000227310771537440022303 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! 
def detect(conf):
	"""Find the gob2 code generator and initialise GOB2/GOB2FLAGS."""
	program = conf.find_program('gob2', var='GOB2')
	if not program:
		conf.fatal('could not find the gob2 compiler')
	conf.env['GOB2'] = program
	conf.env['GOB2FLAGS'] = ''
/usr/bin/env python # encoding: utf-8 # Carlos Rafael Giani, 2007 (dv) import sys import ar def find_gdc(conf): v = conf.env d_compiler = None if v['D_COMPILER']: d_compiler = v['D_COMPILER'] if not d_compiler: d_compiler = conf.find_program('gdc', var='D_COMPILER') if not d_compiler: return 0 v['D_COMPILER'] = d_compiler def common_flags(conf): v = conf.env # _DFLAGS _DIMPORTFLAGS _DLIBDIRFLAGS _DLIBFLAGS # for mory info about the meaning of this dict see dmd.py v['DFLAGS'] = {'gdc':[], 'dmd':[]} v['D_SRC_F'] = '' v['D_TGT_F'] = '-c -o ' v['DPATH_ST'] = '-I%s' # template for adding import paths # linker v['D_LINKER'] = v['D_COMPILER'] v['DLNK_SRC_F'] = '' v['DLNK_TGT_F'] = '-o ' v['DLIB_ST'] = '-l%s' # template for adding libs v['DLIBPATH_ST'] = '-L%s' # template for adding libpaths # debug levels v['DLINKFLAGS'] = [] v['DFLAGS_OPTIMIZED'] = ['-O3'] v['DFLAGS_DEBUG'] = ['-O0'] v['DFLAGS_ULTRADEBUG'] = ['-O0'] v['D_shlib_DFLAGS'] = [] v['D_shlib_LINKFLAGS'] = ['-shared'] if sys.platform == "win32": v['D_program_PATTERN'] = '%s.exe' v['D_shlib_PATTERN'] = 'lib%s.dll' v['D_staticlib_PATTERN'] = 'lib%s.a' else: v['D_program_PATTERN'] = '%s' v['D_shlib_PATTERN'] = 'lib%s.so' v['D_staticlib_PATTERN'] = 'lib%s.a' def detect(conf): v = conf.env find_gdc(conf) ar.find_ar(conf) conf.check_tool('d') common_flags(conf) def set_options(opt): pass lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/__init__.py0000664000175000017500000000015710771525013023074 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2006 (ita) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/misc.py0000664000175000017500000003040410772264157022300 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! 
/usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2006 (ita) """ Custom objects: - execute a function everytime - copy a file somewhere else """ import shutil, re, os, types import Object, Action, Node, Params, Task, Common import pproc as subprocess from Params import fatal, debug def copy_func(tsk): "Make a file copy. This might be used to make other kinds of file processing (even calling a compiler is possible)" env = tsk.env() infile = tsk.m_inputs[0].abspath(env) outfile = tsk.m_outputs[0].abspath(env) try: shutil.copy2(infile, outfile) except OSError, IOError: return 1 else: if tsk.chmod: os.chmod(outfile, tsk.chmod) return 0 def action_process_file_func(tsk): "Ask the function attached to the task to process it" if not tsk.fun: fatal('task must have a function attached to it for copy_func to work!') return tsk.fun(tsk) class cmd_taskgen(Object.task_gen): "This object will call a command everytime" def __init__(self, type='none'): Object.task_gen.__init__(self) self.m_type = type self.prio = 1 self.fun = None self.inst_var = '' self.inst_dir = '' def apply(self): # create a task if not self.fun: fatal('cmdobj needs a function!') tsk = Task.TaskCmd(self.fun, self.env) tsk.prio = self.prio self.m_tasks.append(tsk) tsk.install = {'var': self.inst_var, 'dir': self.inst_dir} class copy_taskgen(Object.task_gen): "By default, make a file copy, if fun is provided, fun will make the copy (or call a compiler, etc)" def __init__(self, type='none'): Object.task_gen.__init__(self) self.source = '' self.target = '' self.chmod = '' self.fun = copy_func self.env = Params.g_build.env().copy() def apply(self): lst = self.to_list(self.source) for filename in lst: node = self.path.find_source(filename) if not node: fatal('cannot find input file %s for processing' % filename) target = self.target if not target or len(lst)>1: target = node.m_name # TODO the file path may be incorrect newnode = self.path.find_build(target) tsk = self.create_task('copy', self.env, 10) 
tsk.set_inputs(node) tsk.set_outputs(newnode) tsk.m_env = self.env tsk.fun = self.fun tsk.chmod = self.chmod if not tsk.env(): tsk.debug() fatal('task witout an environment') def subst_func(tsk): "Substitutes variables in a .in file" m4_re = re.compile('@(\w+)@', re.M) env = tsk.env() infile = tsk.m_inputs[0].abspath(env) outfile = tsk.m_outputs[0].abspath(env) file = open(infile, 'r') code = file.read() file.close() s = m4_re.sub(r'%(\1)s', code) dict = tsk.dict if not dict: names = m4_re.findall(code) for i in names: if env[i] and type(env[i]) is types.ListType : dict[i] = " ".join(env[i]) else: dict[i] = env[i] file = open(outfile, 'w') file.write(s % dict) file.close() return 0 class subst_taskgen(Object.task_gen): def __init__(self, type='none'): Object.task_gen.__init__(self) self.fun = subst_func self.dict = {} self.prio = 8 self.inst_var = '' self.inst_dir = '' def apply(self): lst = self.to_list(self.source) for filename in lst: node = self.path.find_source(filename) if not node: fatal('cannot find input file %s for processing' % filename) newnode = node.change_ext('') if self.dict and not self.env['DICT_HASH']: self.env = self.env.copy() self.env['DICT_HASH'] = hash(str(self.dict)) # <- pretty sure it wont work (ita) tsk = self.create_task('copy', self.env, self.prio) tsk.set_inputs(node) tsk.set_outputs(newnode) tsk.m_env = self.env tsk.fun = self.fun tsk.dict = self.dict tsk.dep_vars = ['DICT_HASH'] tsk.install = {'var': self.inst_var, 'dir': self.inst_dir} if not tsk.env(): tsk.debug() fatal('task witout an environment') class CommandOutputTask(Task.Task): def __init__(self, env, priority, command, command_node, command_args, stdin, stdout, cwd): Task.Task.__init__(self, 'command-output', env, prio=priority, normal=1) assert isinstance(command, (str, Node.Node)) self.command = command self.command_args = command_args self.stdin = stdin self.stdout = stdout self.cwd = cwd if command_node is not None: self.dep_nodes = [command_node] self.dep_vars = [] # 
additional environment variables to look class CommandOutput(Object.task_gen): CMD_ARGV_INPUT, CMD_ARGV_OUTPUT, CMD_ARGV_INPUT_DIR, CMD_ARGV_OUTPUT_DIR = range(4) def __init__(self, env=None): Object.task_gen.__init__(self) self.env = env if not self.env: self.env = Params.g_build.env().copy() self.stdin = None self.stdout = None # the command to execute self.command = None # whether it is an external command; otherwise it is assumed # to be an executable binary or script that lives in the # source or build tree. self.command_is_external = False # extra parameters (argv) to pass to the command (excluding # the command itself) self.argv = [] # task priority self.prio = 100 # dependencies to other objects -> this is probably not what you want (ita) # values must be 'task_gen' instances (not names!) self.dependencies = [] # dependencies on env variable contents self.dep_vars = [] # input files that are implicit, i.e. they are not # stdin, nor are they mentioned explicitly in argv self.hidden_inputs = [] # output files that are implicit, i.e. 
they are not # stdout, nor are they mentioned explicitly in argv self.hidden_outputs = [] # change the subprocess to this cwd (must use obj.input_dir() or output_dir() here) self.cwd = None def _command_output_func(task): assert len(task.m_inputs) > 0 def input_path(node, template): if task.cwd is None: return template % node.bldpath(task.env()) else: return template % node.abspath() def output_path(node, template): fun = node.abspath if task.cwd is None: fun = node.bldpath return template % fun(task.env()) if isinstance(task.command, Node.Node): argv = [input_path(task.command, '%s')] else: argv = [task.command] for arg in task.command_args: if isinstance(arg, str): argv.append(arg) else: role, node, template = arg if role in (CommandOutput.CMD_ARGV_INPUT, CommandOutput.CMD_ARGV_INPUT_DIR): argv.append(input_path(node, template)) elif role in (CommandOutput.CMD_ARGV_OUTPUT, CommandOutput.CMD_ARGV_OUTPUT_DIR): argv.append(output_path(node, template)) else: raise AssertionError if task.stdin: stdin = file(input_path(task.stdin, '%s')) else: stdin = None if task.stdout: stdout = file(output_path(task.stdout, '%s'), "w") else: stdout = None if task.cwd is None: cwd = ('None (actually %r)' % os.getcwd()) else: cwd = repr(task.cwd) Params.debug("command-output: cwd=%s, stdin=%r, stdout=%r, argv=%r" % (cwd, stdin, stdout, argv)) command = subprocess.Popen(argv, stdin=stdin, stdout=stdout, cwd=task.cwd) return command.wait() _command_output_func = staticmethod(_command_output_func) def apply(self): if self.command is None: Params.fatal("command-output missing command") if self.command_is_external: cmd = self.command cmd_node = None else: cmd_node = self.path.find_build(self.command, create=True) assert cmd_node is not None, ('''Could not find command '%s' in source tree. 
Hint: if this is an external command, use command_is_external=True''') % (self.command,) cmd = cmd_node if self.cwd is None: cwd = None else: role, file_name, template = self.cwd if role == CommandOutput.CMD_ARGV_INPUT_DIR: if isinstance(file_name, Node.Node): input_node = file_name else: input_node = self.path.find_dir(file_name) if input_node is None: Params.fatal("File %s not found" % (file_name,)) cwd = input_node.abspath() elif role == CommandOutput.CMD_ARGV_OUTPUT_DIR: if isinstance(file_name, Node.Node): output_node = file_name else: output_node = self.path.find_dir(file_name) if output_node is None: Params.fatal("File %s not found" % (file_name,)) cwd = output_node.abspath(self.env) else: raise AssertionError args = [] inputs = [] outputs = [] for arg in self.argv: if isinstance(arg, str): args.append(arg) else: role, file_name, template = arg if role == CommandOutput.CMD_ARGV_INPUT: if isinstance(file_name, Node.Node): input_node = file_name else: input_node = self.path.find_build(file_name, create=True) if input_node is None: Params.fatal("File %s not found" % (file_name,)) inputs.append(input_node) args.append((role, input_node, template)) elif role == CommandOutput.CMD_ARGV_OUTPUT: if isinstance(file_name, Node.Node): output_node = file_name else: output_node = self.path.find_build(file_name, create=True) if output_node is None: Params.fatal("File %s not found" % (file_name,)) outputs.append(output_node) args.append((role, output_node, template)) elif role == CommandOutput.CMD_ARGV_INPUT_DIR: if isinstance(file_name, Node.Node): input_node = file_name else: input_node = self.path.find_dir(file_name) if input_node is None: Params.fatal("File %s not found" % (file_name,)) args.append((role, input_node, template)) elif role == CommandOutput.CMD_ARGV_OUTPUT_DIR: if isinstance(file_name, Node.Node): output_node = file_name else: output_node = self.path.find_dir(file_name) if output_node is None: Params.fatal("File %s not found" % (file_name,)) 
args.append((role, output_node, template)) else: raise AssertionError if self.stdout is None: stdout = None else: stdout = self.path.find_build(self.stdout, create=True) if stdout is None: Params.fatal("File %s not found" % (self.stdout,)) outputs.append(stdout) if self.stdin is None: stdin = None else: stdin = self.path.find_build(self.stdin, create=True) if stdin is None: Params.fatal("File %s not found" % (self.stdin,)) inputs.append(stdin) for hidden_input in self.to_list(self.hidden_inputs): node = self.path.find_build(hidden_input, create=True) if node is None: Params.fatal("File %s not found in dir %s" % (hidden_input, self.path)) inputs.append(node) for hidden_output in self.to_list(self.hidden_outputs): node = self.path.find_build(hidden_output, create=True) if node is None: Params.fatal("File %s not found in dir %s" % (hidden_output, self.path)) outputs.append(node) if not inputs: Params.fatal("command-output objects must have at least one input file") if not outputs: Params.fatal("command-output objects must have at least one output file") task = CommandOutputTask(self.env, self.prio, cmd, cmd_node, args, stdin, stdout, cwd) self.m_tasks.append(task) task.set_inputs(inputs) task.set_outputs(outputs) task.dep_vars = self.to_list(self.dep_vars) for dep in self.dependencies: assert dep is not self if not dep.m_posted: dep.post() for dep_task in dep.m_tasks: task.set_run_after(dep_task) def input_file(self, file_name, template='%s'): """Returns an object to be used as argv element that instructs the task to use a file from the input vector at the given position as argv element.""" return (CommandOutput.CMD_ARGV_INPUT, file_name, template) def output_file(self, file_name, template='%s'): """Returns an object to be used as argv element that instructs the task to use a file from the output vector at the given position as argv element.""" return (CommandOutput.CMD_ARGV_OUTPUT, file_name, template) def input_dir(self, file_name, template='%s'): """Returns an 
object to be used as argv element that instructs the task to use a directory path from the input vector at the given position as argv element.""" return (CommandOutput.CMD_ARGV_INPUT_DIR, file_name, template) def output_dir(self, file_name, template='%s'): """Returns an object to be used as argv element that instructs the task to use a directory path from the output vector at the given position as argv element.""" return (CommandOutput.CMD_ARGV_OUTPUT_DIR, file_name, template) Action.Action('copy', vars=[], func=action_process_file_func) Action.Action('command-output', func=CommandOutput._command_output_func, color='BLUE') Object.task_gen.classes['command-output'] = CommandOutput lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/gnome.py0000664000175000017500000002616110772264157022457 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2006-2008 (ita) "Gnome support" import os, re import Object, Action, Params, Common, Scan, Utils, Runner import cc from Params import fatal, error from Object import taskgen, before, after n1_regexp = re.compile('(.*)', re.M) n2_regexp = re.compile('(.*)', re.M) def postinstall_schemas(prog_name): if Params.g_commands['install']: dir = Common.path_install('PREFIX', 'etc/gconf/schemas/%s.schemas' % prog_name) if not Params.g_options.destdir: # add the gconf schema Params.pprint('YELLOW', "Installing GConf schema.") command = 'gconftool-2 --install-schema-file=%s 1> /dev/null' % dir ret = Runner.exec_command(command) else: Params.pprint('YELLOW', "GConf schema not installed. 
After install, run this:") Params.pprint('YELLOW', "gconftool-2 --install-schema-file=%s" % dir) def postinstall_icons(): dir = Common.path_install('DATADIR', 'icons/hicolor') if Params.g_commands['install']: if not Params.g_options.destdir: # update the pixmap cache directory Params.pprint('YELLOW', "Updating Gtk icon cache.") command = 'gtk-update-icon-cache -q -f -t %s' % dir ret = Runner.exec_command(command) else: Params.pprint('YELLOW', "Icon cache not updated. After install, run this:") Params.pprint('YELLOW', "gtk-update-icon-cache -q -f -t %s" % dir) def postinstall_scrollkeeper(prog_name): if Params.g_commands['install']: # now the scrollkeeper update if we can write to the log file if os.path.iswriteable('/var/log/scrollkeeper.log'): dir1 = Common.path_install('PREFIX', 'var/scrollkeeper') dir2 = Common.path_install('DATADIR', 'omf/%s' % prog_name) command = 'scrollkeeper-update -q -p %s -o %s' % (dir1, dir2) ret = Runner.exec_command(command) def postinstall(prog_name='myapp', schemas=1, icons=1, scrollkeeper=1): if schemas: postinstall_schemas(prog_name) if icons: postinstall_icons() if scrollkeeper: postinstall_scrollkeeper(prog_name) # give specs class xml_to_taskgen(Object.task_gen): def __init__(self): Object.task_gen(self) self.source = 'xmlfile' self.xslt = 'xlsltfile' self.target = 'hey' self.inst_var = 'PREFIX' self.inst_dir = '' self.task_created = None def apply(self): self.env = self.env.copy() tree = Params.g_build current = tree.m_curdirnode xmlfile = self.path.find_source(self.source) xsltfile = self.path.find_source(self.xslt) tsk = self.create_task('xmlto', self.env, 6) tsk.set_inputs([xmlfile, xsltfile]) tsk.set_outputs(xmlfile.change_ext('html')) tsk.install = {'var':self.inst_var, 'dir':self.inst_dir} class sgml_man_scanner(Scan.scanner): def __init__(self): Scan.scanner.__init__(self) def scan(self, task, node): env = task.env() variant = node.variant(env) fi = open(node.abspath(env), 'r') content = fi.read() fi.close() name = 
n1_regexp.findall(content)[0] num = n2_regexp.findall(content)[0] doc_name = name+'.'+num return ([], [doc_name]) def do_scan(self, task, node): Scan.scanner.do_scan(self, task, node) variant = node.variant(task.env()) tmp_lst = Params.g_build.m_raw_deps[variant][node] name = tmp_lst[0] task.set_outputs(Params.g_build.m_curdirnode.find_build(name)) sgml_scanner = sgml_man_scanner() class gnome_sgml2manobj(Object.task_gen): def __init__(self, appname): Object.task_gen.__init__(self) self.m_tasks=[] self.m_appname = appname def apply(self): def install_result(task): out = task.m_outputs[0] name = out.m_name ext = name[-1] env = task.env() Common.install_files('DATADIR', 'man/man%s/' % ext, out.abspath(env), env) tree = Params.g_build tree.rescan(self.path) for node in self.path.files(): base, ext = os.path.splitext(node.m_name) if ext != '.sgml': continue task = self.create_task('sgml2man', self.env, 2) task.set_inputs(node) if Params.g_install: task.install = install_results # no outputs, the scanner does it # no caching for now, this is not a time-critical feature # in the future the scanner can be used to do more things (find dependencies, etc) sgml_scanner.do_scan(task, node) # Unlike the sgml and doc processing, the dbus and marshal beast # generate c/c++ code that we want to mix # here we attach new methods to Object.task_gen def add_marshal_file(self, filename, prefix, mode): if not hasattr(self, 'marshal_lst'): self.marshal_lst = [] self.meths.add('process_marshal') self.marshal_lst.append([filename, prefix, mode]) def process_marshal(self): for i in getattr(self, 'marshal_lst', []): env = self.env.copy() node = self.path.find_source(i[0]) if not node: fatal('file not found on gnome obj '+i[0]) if i[2] == '--header': env['GGM_PREFIX'] = i[1] env['GGM_MODE'] = i[2] task = self.create_task('glib_genmarshal', env, 2) task.set_inputs(node) task.set_outputs(node.change_ext('.h')) elif i[2] == '--body': env['GGM_PREFIX'] = i[1] env['GGM_MODE'] = i[2] # the c file 
generated will be processed too outnode = node.change_ext('.c') self.allnodes.append(outnode) task = self.create_task('glib_genmarshal', env, 2) task.set_inputs(node) task.set_outputs(node.change_ext('.c')) else: error("unknown type for marshal "+i[2]) def add_dbus_file(self, filename, prefix, mode): if not hasattr(self, 'dbus_lst'): self.dbus_lst = [] self.meths.add('process_dbus') self.dbus_lst.append([filename, prefix, mode]) def process_dbus(self): for i in getattr(self, 'dbus_lst', []): env = self.env.copy() node = self.path.find_source(i[0]) if not node: fatal('file not found on gnome obj '+i[0]) env['DBT_PREFIX'] = i[1] env['DBT_MODE'] = i[2] task = self.create_task('dbus_binding_tool', env, 2) task.set_inputs(node) task.set_outputs(node.change_ext('.h')) def process_enums(self): for x in getattr(self, 'mk_enums', []): # temporary env = self.env.copy() task = self.create_task('mk_enums', env) inputs = [] # process the source src_lst = self.to_list(x['source']) if not src_lst: Params.fatal('missing source '+str(x)) src_lst = [self.path.find_source(k) for k in src_lst] inputs += src_lst env['MK_SOURCE'] = [k.abspath(env) for k in src_lst] # find the target if not x['target']: Params.fatal('missing target '+str(x)) tgt_node = self.path.find_build(x['target'], create=1) if tgt_node.m_name.endswith('.c'): self.allnodes.append(tgt_node) env['MK_TARGET'] = tgt_node.abspath(env) # template, if provided if x['template']: template_node = self.path.find_source(x['template']) env['MK_TEMPLATE'] = '--template %s' % (template_node.abspath(env)) inputs.append(template_node) # update the task instance task.set_inputs(inputs) task.set_outputs(tgt_node) def add_glib_mkenum(self, source='', template='', target=''): "just a helper" if not hasattr(self, 'mk_enums'): self.mk_enums = [] self.meths.add('process_enums') self.mk_enums.append({'source':source, 'template':template, 'target':target}) Action.simple_action('mk_enums', '${GLIB_MKENUM} ${MK_TEMPLATE} ${MK_SOURCE} > 
${MK_TARGET}', 'PINK', prio=30) Action.simple_action('sgml2man', '${SGML2MAN} -o ${TGT[0].bld_dir(env)} ${SRC} > /dev/null', color='BLUE') Action.simple_action('glib_genmarshal', '${GGM} ${SRC} --prefix=${GGM_PREFIX} ${GGM_MODE} > ${TGT}', color='BLUE') Action.simple_action('dbus_binding_tool', '${DBT} --prefix=${DBT_PREFIX} --mode=${DBT_MODE} --output=${TGT} ${SRC}', color='BLUE') Action.simple_action('xmlto', '${XMLTO} html -m ${SRC[1]} ${SRC[0]}') def detect(conf): conf.check_tool('checks') sgml2man = conf.find_program('docbook2man') #if not sgml2man: # fatal('The program docbook2man is mandatory!') conf.env['SGML2MAN'] = sgml2man glib_genmarshal = conf.find_program('glib-genmarshal') conf.env['GGM'] = glib_genmarshal dbus_binding_tool = conf.find_program('dbus-binding-tool') conf.env['DBT'] = dbus_binding_tool mk_enums_tool = conf.find_program('glib-mkenums') conf.env['GLIB_MKENUM'] = mk_enums_tool def getstr(varname): return getattr(Params.g_options, varname, '') prefix = conf.env['PREFIX'] datadir = getstr('datadir') libdir = getstr('libdir') sysconfdir = getstr('sysconfdir') localstatedir = getstr('localstatedir') if not datadir: datadir = os.path.join(prefix,'share') if not libdir: libdir = os.path.join(prefix,'lib') if not sysconfdir: if os.path.normpath(prefix) == '/usr': sysconfdir = '/etc' else: sysconfdir = os.path.join(prefix, 'etc') if not localstatedir: if os.path.normpath(prefix) == '/usr': localstatedir = '/var' else: localstatedir = os.path.join(prefix, 'var') # addefine also sets the variable to the env conf.define('GNOMELOCALEDIR', os.path.join(datadir, 'locale')) conf.define('DATADIR', datadir) conf.define('LIBDIR', libdir) conf.define('SYSCONFDIR', sysconfdir) conf.define('LOCALSTATEDIR', localstatedir) # TODO: maybe the following checks should be in a more generic module. #always defined to indicate that i18n is enabled */ conf.define('ENABLE_NLS', 1) # TODO #Define to 1 if you have the `bind_textdomain_codeset' function. 
conf.define('HAVE_BIND_TEXTDOMAIN_CODESET', 1) # TODO #Define to 1 if you have the `dcgettext' function. conf.define('HAVE_DCGETTEXT', 1) #Define to 1 if you have the header file. conf.check_header('dlfcn.h', 'HAVE_DLFCN_H') # TODO #Define if the GNU gettext() function is already present or preinstalled. conf.define('HAVE_GETTEXT', 1) #Define to 1 if you have the header file. conf.check_header('inttypes.h', 'HAVE_INTTYPES_H') # TODO FIXME #Define if your file defines LC_MESSAGES. #conf.add_define('HAVE_LC_MESSAGES', '1') #Define to 1 if you have the header file. conf.check_header('locale.h', 'HAVE_LOCALE_H') #Define to 1 if you have the header file. conf.check_header('memory.h', 'HAVE_MEMORY_H') #Define to 1 if you have the header file. conf.check_header('stdint.h', 'HAVE_STDINT_H') #Define to 1 if you have the header file. conf.check_header('stdlib.h', 'HAVE_STDLIB_H') #Define to 1 if you have the header file. conf.check_header('strings.h', 'HAVE_STRINGS_H') #Define to 1 if you have the header file. conf.check_header('string.h', 'HAVE_STRING_H') #Define to 1 if you have the header file. conf.check_header('sys/stat.h', 'HAVE_SYS_STAT_H') #Define to 1 if you have the header file. conf.check_header('sys/types.h', 'HAVE_SYS_TYPES_H') #Define to 1 if you have the header file. 
conf.check_header('unistd.h', 'HAVE_UNISTD_H') def set_options(opt): try: # we do not know yet opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]') except Exception: pass for i in "execprefix datadir libdir sysconfdir localstatedir".split(): opt.add_option('--'+i, type='string', default='', dest=i) taskgen(add_marshal_file) taskgen(process_marshal) before('apply_core')(process_marshal) taskgen(add_dbus_file) taskgen(process_dbus) before('apply_core')(process_dbus) taskgen(process_enums) before('apply_core')(process_enums) taskgen(add_glib_mkenum) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/vala.py0000664000175000017500000000706710771537440022276 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # Ali Sabil, 2007 import os.path, shutil import Action, Object, Runner, Utils, Params, Node from Object import extension EXT_VALA = ['.vala'] class ValacAction(Action.Action): def __init__(self): Action.Action.__init__(self, 'valac', color='GREEN') def get_str(self, task): "string to display to the user" env = task.env() src_str = " ".join([a.m_name for a in task.m_inputs]) return "* %s : %s" % (self.m_name, src_str) def run(self, task): env = task.env() inputs = [a.srcpath(env) for a in task.m_inputs] valac = env['VALAC'] vala_flags = env.get_flat('VALAFLAGS') top_src = Params.g_build.m_srcnode.abspath() top_bld = Params.g_build.m_srcnode.abspath(env) cmd = [valac, '-C', vala_flags] if task.threading: cmd.append('--thread') if task.output_type in ('shlib', 'staticlib', 'plugin'): cmd.append('--library ' + task.target) cmd.append('--basedir ' + top_src) cmd.append('-d ' + top_bld) #cmd.append('-d %s' % Params.g_build.m_srcnode.abspath(bld.env())) #cmd.append('-d %s' % Params.g_build.m_bldnode.bldpath(env)) else: output_dir = task.m_outputs[0].bld_dir(env) cmd.append('-d %s' % output_dir) for vapi_dir in task.vapi_dirs: cmd.append('--vapidir=%s' % 
vapi_dir) for package in task.packages: cmd.append('--pkg %s' % package) cmd.append(" ".join(inputs)) result = Runner.exec_command(" ".join(cmd)) if task.output_type in ('shlib', 'staticlib', 'plugin'): # generate the .deps file if task.packages: filename = os.path.join(task.m_outputs[0].bld_dir(env), "%s.deps" % task.target) deps = open(filename, 'w') for package in task.packages: deps.write(package + '\n') deps.close() # handle vala 0.1.6 who doesn't honor --directory for the generated .vapi # waf is always run from the build directory try: src_vapi = os.path.join(top_bld, "..", "%s.vapi" % task.target) dst_vapi = task.m_outputs[0].bld_dir(env) shutil.move(src_vapi, dst_vapi) except IOError: pass return result def vala_file(self, node): valatask = self.create_task('valac') valatask.output_type = self.m_type valatask.packages = [] valatask.vapi_dirs = [] valatask.target = self.target valatask.threading = False if hasattr(self, 'packages'): valatask.packages = Utils.to_list(self.packages) if hasattr(self, 'vapi_dirs'): vapi_dirs = Utils.to_list(self.vapi_dirs) for vapi_dir in vapi_dirs: valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath()) valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath(self.env)) if hasattr(self, 'threading'): valatask.threading = self.threading input_nodes = [] for source in self.to_list(self.source): if source.endswith(".vala"): input_nodes.append(self.path.find_source(source)) valatask.set_inputs(input_nodes) output_nodes = [] for node in input_nodes: output_nodes.append(node.change_ext('.c')) output_nodes.append(node.change_ext('.h')) if self.m_type != 'program': output_nodes.append(self.path.find_build('%s.vapi' % self.target)) if valatask.packages: output_nodes.append(self.path.find_build('%s.deps' % self.target)) valatask.set_outputs(output_nodes) for node in valatask.m_outputs: if node.m_name.endswith('.c'): self.allnodes.append(node) # create our action here ValacAction() def detect(conf): valac = 
conf.find_program('valac', var='VALAC') if not valac: conf.fatal('Could not find the valac compiler anywhere') conf.env['VALAC'] = valac conf.env['VALAFLAGS'] = '' extension(EXT_VALA)(vala_file) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/config_c.py0000664000175000017500000007715510772251134023121 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 import sys if sys.hexversion < 0x020400f0: from sets import Set as set #! /usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2005-2008 (ita) """ the c/c++ configuration routines """ import os, types, imp, cPickle, sys, shlex, warnings try: from hashlib import md5 except ImportError: from md5 import md5 import Action, Params, Environment, Runner, Build, Utils, Object, Configure from Params import fatal, warning from Constants import * def wrap(cls): def foo(self): x = globals()[cls.__name__](self) #print x return x setattr(Configure.Configure, 'create_'+cls.__name__, foo) class enumerator_base(object): def __init__(self, conf): self.conf = conf self.env = conf.env self.define = '' self.mandatory = 0 self.message = '' def error(self): if self.message: fatal(self.message) else: fatal('A mandatory check failed. Make sure all dependencies are ok and can be found.') def update_hash(self, md5hash): classvars = vars(self) for (var, value) in classvars.iteritems(): # TODO comparing value to env is fast or slow ? 
if callable(var): continue if value == self: continue if value == self.env: continue if value == self.conf: continue md5hash.update(str(value)) def update_env(self, hashtable): # skip this if hashtable is only a string if not type(hashtable) is types.StringType: for name in hashtable.keys(): self.env.append_value(name, hashtable[name]) def validate(self): pass def hash(self): m = md5() self.update_hash(m) return m.digest() def run_cache(self, retvalue): # interface, do not remove pass def run(self): self.validate() if Params.g_cache_global and not Params.g_options.nocache: newhash = self.hash() try: ret = self.conf.m_cache_table[newhash] except KeyError: pass # go to A1 just below else: self.run_cache(ret) if self.mandatory and not ret: self.error() return ret # A1 - no cache or new test ret = self.run_test() if self.mandatory and not ret: self.error() if Params.g_cache_global: self.conf.m_cache_table[newhash] = ret return ret # Override this method, not run()! def run_test(self): return not Configure.TEST_OK class configurator_base(enumerator_base): def __init__(self, conf): enumerator_base.__init__(self, conf) self.uselib = '' class program_enumerator(enumerator_base): def __init__(self,conf): enumerator_base.__init__(self, conf) self.name = '' self.path = [] self.var = None def error(self): errmsg = 'program %s cannot be found' % self.name if self.message: errmsg += '\n%s' % self.message fatal(errmsg) def run_cache(self, retval): self.conf.check_message('program %s (cached)' % self.name, '', retval, option=retval) if self.var: self.env[self.var] = retval def run_test(self): ret = Configure.find_program_impl(self.env, self.name, self.path, self.var) self.conf.check_message('program', self.name, ret, ret) if self.var: self.env[self.var] = ret return ret wrap(program_enumerator) class function_enumerator(enumerator_base): def __init__(self,conf): enumerator_base.__init__(self, conf) self.function = '' self.define = '' self.headers = [] self.header_code = '' 
self.custom_code = '' self.include_paths = [] self.libs = [] self.lib_paths = [] def error(self): errmsg = 'function %s cannot be found' % self.function if self.message: errmsg += '\n%s' % self.message fatal(errmsg) def validate(self): if not self.define: self.define = self.function.upper() def run_cache(self, retval): self.conf.check_message('function %s (cached)' % self.function, '', retval, option='') if retval: self.conf.define(self.define, retval) else: self.conf.undefine(self.define) def run_test(self): ret = not Configure.TEST_OK oldlibpath = self.env['LIBPATH'] oldlib = self.env['LIB'] code = [] code.append(self.header_code) code.append('\n') for header in self.headers: code.append('#include <%s>\n' % header) if self.custom_code: code.append('int main(){%s\nreturn 0;}\n' % self.custom_code) else: code.append('int main(){\nvoid *p;\np=(void*)(%s);\nreturn 0;\n}\n' % self.function) self.env['LIB'] = self.libs self.env['LIBPATH'] = self.lib_paths obj = check_data() obj.code = "\n".join(code) obj.includes = self.include_paths obj.env = self.env ret = int(self.conf.run_check(obj)) self.conf.check_message('function %s' % self.function, '', ret, option='') if ret: self.conf.define(self.define, ret) else: self.conf.undefine(self.define) self.env['LIB'] = oldlib self.env['LIBPATH'] = oldlibpath return ret wrap(function_enumerator) class library_enumerator(enumerator_base): "find a library in a list of paths" def __init__(self, conf): enumerator_base.__init__(self, conf) self.name = '' self.path = [] self.code = 'int main() {return 0;}\n' self.uselib = '' # to set the LIB_NAME and LIBPATH_NAME self.nosystem = 0 # do not use standard lib paths self.want_message = 1 def error(self): errmsg = 'library %s cannot be found' % self.name if self.message: errmsg += '\n%s' % self.message fatal(errmsg) def run_cache(self, retval): if self.want_message: self.conf.check_message('library %s (cached)' % self.name, '', retval, option=retval) self.update_env(retval) def 
validate(self): if not self.path: self.path = Configure.g_stdlibpath else: if not self.nosystem: self.path += Configure.g_stdlibpath def run_test(self): ret = '' # returns a string patterns = [self.env['shlib_PATTERN'], 'lib%s.dll.a', 'lib%s.lib', self.env['staticlib_PATTERN']] for x in patterns: name = x % self.name ret = Configure.find_file(name, self.path) if ret: break if self.want_message: self.conf.check_message('library '+self.name, '', ret, option=ret) if self.uselib: self.env['LIB_'+self.uselib] += [ self.name ] self.env['LIBPATH_'+self.uselib] += [ ret ] return ret wrap(library_enumerator) class header_enumerator(enumerator_base): "find a header in a list of paths" def __init__(self,conf): enumerator_base.__init__(self, conf) self.name = [] self.path = [] self.define = [] self.nosystem = 0 self.want_message = 1 def validate(self): if not self.path: self.path = Configure.g_stdincpath else: if not self.nosystem: self.path += Configure.g_stdincpath def error(self): errmsg = 'cannot find %s in %s' % (self.name, str(self.path)) if self.message: errmsg += '\n%s' % self.message fatal(errmsg) def run_cache(self, retval): if self.want_message: self.conf.check_message('header %s (cached)' % self.name, '', retval, option=retval) if self.define: self.env[self.define] = retval def run_test(self): ret = Configure.find_file(self.name, self.path) if self.want_message: self.conf.check_message('header', self.name, ret, ret) if self.define: self.env[self.define] = ret return ret wrap(header_enumerator) ## ENUMERATORS END ################### ################### ## CONFIGURATORS class cfgtool_configurator(configurator_base): def __init__(self,conf): configurator_base.__init__(self, conf) self.uselib = '' self.define = '' self.binary = '' self.tests = {} def error(self): errmsg = '%s cannot be found' % self.binary if self.message: errmsg += '\n%s' % self.message fatal(errmsg) def validate(self): if not self.binary: raise ValueError, "no binary given in cfgtool!" 
if not self.uselib: raise ValueError, "no uselib given in cfgtool!" if not self.define and self.uselib: self.define = 'HAVE_'+self.uselib if not self.tests: self.tests['--cflags'] = 'CCFLAGS' self.tests['--cflags'] = 'CXXFLAGS' self.tests['--libs'] = 'LINKFLAGS' def run_cache(self, retval): if retval: self.update_env(retval) self.conf.define(self.define, 1) else: self.conf.undefine(self.define) self.conf.check_message('config-tool %s (cached)' % self.binary, '', retval, option='') def run_test(self): retval = {} found = Configure.TEST_OK null='2>/dev/null' if sys.platform == "win32": null='2>nul' try: ret = os.popen('%s %s %s' % (self.binary, self.tests.keys()[0], null)).close() if ret: raise ValueError, "error" for flag in self.tests: var = self.tests[flag] + '_' + self.uselib cmd = '%s %s %s' % (self.binary, flag, null) retval[var] = [os.popen(cmd).read().strip()] self.update_env(retval) except ValueError: retval = {} found = not Configure.TEST_OK if found: self.conf.define(self.define, found) else: self.conf.undefine(self.define) self.conf.check_message('config-tool ' + self.binary, '', found, option = '') return retval wrap(cfgtool_configurator) class pkgconfig_configurator(configurator_base): """ pkgconfig_configurator is a frontend to pkg-config variables: - name: name of the .pc file (has to be set at least) - version: atleast-version to check for - path: override the pkgconfig path (PKG_CONFIG_PATH) - uselib: name that could be used in tasks with obj.uselib if not set uselib = upper(name) - define: name that will be used in config.h if not set define = HAVE_+uselib - variables: list of addional variables to be checked for, for example variables='prefix libdir' """ def __init__(self, conf): configurator_base.__init__(self,conf) self.name = '' # name of the .pc file self.version = '' # version to check self.pkgpath = os.path.join(Params.g_options.prefix, 'lib', 'pkgconfig') # pkg config path self.uselib = '' # can be set automatically self.define = '' # can 
be set automatically self.binary = '' # name and path for pkg-config # You could also check for extra values in a pkg-config file. # Use this value to define which values should be checked # and defined. Several formats for this value are supported: # - string with spaces to separate a list # - list of values to check (define name will be upper(uselib"_"value_name)) # - a list of [value_name, override define_name] self.variables = [] self.defines = {} def error(self): if self.version: errmsg = 'pkg-config cannot find %s >= %s' % (self.name, self.version) else: errmsg = 'pkg-config cannot find %s' % self.name if self.message: errmsg += '\n%s' % self.message fatal(errmsg) def validate(self): if not self.uselib: self.uselib = self.name.upper() if not self.define: self.define = 'HAVE_'+self.uselib def run_cache(self, retval): if self.version: self.conf.check_message('package %s >= %s (cached)' % (self.name, self.version), '', retval, option='') else: self.conf.check_message('package %s (cached)' % self.name, '', retval, option='') if retval: self.conf.define(self.define, 1) else: self.conf.undefine(self.define) self.update_env(retval) def _setup_pkg_config_path(self): pkgpath = self.pkgpath if not pkgpath: return "" if sys.platform == 'win32': if hasattr(self, 'pkgpath_win32_setup'): return "" pkgpath_env=os.getenv('PKG_CONFIG_PATH') if pkgpath_env: pkgpath_env = pkgpath_env + ';' +pkgpath else: pkgpath_env = pkgpath os.putenv('PKG_CONFIG_PATH',pkgpath_env) setattr(self,'pkgpath_win32_setup',True) return "" pkgpath = 'PKG_CONFIG_PATH=$PKG_CONFIG_PATH:' + pkgpath return pkgpath def run_test(self): pkgpath = self.pkgpath pkgbin = self.binary uselib = self.uselib # check if self.variables is a string with spaces # to separate the variables to check for # if yes convert variables to a list if type(self.variables) is types.StringType: self.variables = str(self.variables).split() if not pkgbin: pkgbin = 'pkg-config' pkgpath = self._setup_pkg_config_path() pkgcom = '%s %s' % 
(pkgpath, pkgbin) for key, val in self.defines.items(): pkgcom += ' --define-variable=%s=%s' % (key, val) g_defines = self.env['PKG_CONFIG_DEFINES'] if type(g_defines) is types.DictType: for key, val in g_defines.items(): if self.defines and self.defines.has_key(key): continue pkgcom += ' --define-variable=%s=%s' % (key, val) retval = {} try: if self.version: cmd = "%s --atleast-version=%s \"%s\"" % (pkgcom, self.version, self.name) ret = os.popen(cmd).close() Params.debug("pkg-config cmd '%s' returned %s" % (cmd, ret)) self.conf.check_message('package %s >= %s' % (self.name, self.version), '', not ret) if ret: raise ValueError, "error" else: cmd = "%s \"%s\"" % (pkgcom, self.name) ret = os.popen(cmd).close() Params.debug("pkg-config cmd '%s' returned %s" % (cmd, ret)) self.conf.check_message('package %s' % (self.name), '', not ret) if ret: raise ValueError, "error" cflags_I = shlex.split(os.popen('%s --cflags-only-I \"%s\"' % (pkgcom, self.name)).read()) cflags_other = shlex.split(os.popen('%s --cflags-only-other \"%s\"' % (pkgcom, self.name)).read()) retval['CCFLAGS_'+uselib] = cflags_other retval['CXXFLAGS_'+uselib] = cflags_other retval['CPPPATH_'+uselib] = [] for incpath in cflags_I: assert incpath[:2] == '-I' or incpath[:2] == '/I' retval['CPPPATH_'+uselib].append(incpath[2:]) # strip '-I' or '/I' #env['LINKFLAGS_'+uselib] = os.popen('%s --libs %s' % (pkgcom, self.name)).read().strip() # Store the library names: modlibs = os.popen('%s --libs-only-l \"%s\"' % (pkgcom, self.name)).read().strip().split() retval['LIB_'+uselib] = [] for item in modlibs: retval['LIB_'+uselib].append( item[2:] ) #Strip '-l' # Store the library paths: modpaths = os.popen('%s --libs-only-L \"%s\"' % (pkgcom, self.name)).read().strip().split() retval['LIBPATH_'+uselib] = [] for item in modpaths: retval['LIBPATH_'+uselib].append( item[2:] ) #Strip '-l' # Store only other: modother = os.popen('%s --libs-only-other \"%s\"' % (pkgcom, self.name)).read().strip().split() 
retval['LINKFLAGS_'+uselib] = [] for item in modother: if str(item).endswith(".la"): import libtool la_config = libtool.libtool_config(item) libs_only_L = la_config.get_libs_only_L() libs_only_l = la_config.get_libs_only_l() for entry in libs_only_l: retval['LIB_'+uselib].append( entry[2:] ) #Strip '-l' for entry in libs_only_L: retval['LIBPATH_'+uselib].append( entry[2:] ) #Strip '-L' else: retval['LINKFLAGS_'+uselib].append( item ) #do not strip anything for variable in self.variables: var_defname = '' # check if variable is a list if (type(variable) is types.ListType): # is it a list of [value_name, override define_name] ? if len(variable) == 2 and variable[1]: # if so use the overrided define_name as var_defname var_defname = variable[1] # convert variable to a string that name the variable to check for. variable = variable[0] # if var_defname was not overrided by the list containing the define_name if not var_defname: var_defname = uselib + '_' + variable.upper() retval[var_defname] = os.popen('%s --variable=%s \"%s\"' % (pkgcom, variable, self.name)).read().strip() self.conf.define(self.define, 1) self.update_env(retval) except ValueError: retval = {} self.conf.undefine(self.define) return retval wrap(pkgconfig_configurator) class test_configurator(configurator_base): def __init__(self, conf): configurator_base.__init__(self, conf) self.name = '' self.code = '' self.flags = '' self.define = '' self.uselib = '' self.want_message = 0 def error(self): errmsg = 'test program would not run' if self.message: errmsg += '\n%s' % self.message fatal(errmsg) def run_cache(self, retval): if self.want_message: self.conf.check_message('custom code (cached)', '', 1, option=retval['result']) def validate(self): if not self.code: fatal('test configurator needs code to compile and run!') def run_test(self): obj = check_data() obj.code = self.code obj.env = self.env obj.uselib = self.uselib obj.flags = self.flags obj.execute = 1 ret = self.conf.run_check(obj) if 
self.want_message: if ret: data = ret['result'] else: data = '' self.conf.check_message('custom code', '', ret, option=data) return ret wrap(test_configurator) class library_configurator(configurator_base): def __init__(self,conf): configurator_base.__init__(self,conf) self.name = '' self.path = [] self.define = '' self.uselib = '' self.code = 'int main(){return 0;}\n' def error(self): errmsg = 'library %s cannot be linked' % self.name if self.message: errmsg += '\n%s' % self.message fatal(errmsg) def run_cache(self, retval): self.conf.check_message('library %s (cached)' % self.name, '', retval) if retval: self.update_env(retval) self.conf.define(self.define, 1) else: self.conf.undefine(self.define) def validate(self): if not self.path: self.path = ['/usr/lib/', '/usr/local/lib', '/lib'] if not self.uselib: self.uselib = self.name.upper() if not self.define: self.define = 'HAVE_'+self.uselib if not self.uselib: fatal('uselib is not defined') if not self.code: fatal('library enumerator must have code to compile') def run_test(self): oldlibpath = self.env['LIBPATH'] oldlib = self.env['LIB'] olduselibpath = self.env['LIBPATH_'+self.uselib] olduselib = self.env['LIB_'+self.uselib] # try the enumerator to find the correct libpath test = self.conf.create_library_enumerator() test.name = self.name test.want_message = 0 test.path = self.path test.env = self.env ret = test.run() if ret: self.env['LIBPATH_'+self.uselib] += [ ret ] self.env['LIB_'+self.uselib] += [ self.name ] #self.env['LIB'] = self.name #self.env['LIBPATH'] = self.lib_paths obj = check_data() obj.code = self.code obj.env = self.env obj.uselib = self.uselib obj.libpath = self.path ret = int(self.conf.run_check(obj)) self.conf.check_message('library %s' % self.name, '', ret) if ret: self.conf.define(self.define, ret) else: self.conf.undefine(self.define) val = {} if ret: val['LIBPATH_'+self.uselib] = self.env['LIBPATH_'+self.uselib] val['LIB_'+self.uselib] = self.env['LIB_'+self.uselib] val[self.define] = ret 
else: self.env['LIBPATH_'+self.uselib] = olduselibpath self.env['LIB_'+self.uselib] = olduselib self.env['LIB'] = oldlib self.env['LIBPATH'] = oldlibpath return val wrap(library_configurator) class framework_configurator(configurator_base): def __init__(self,conf): configurator_base.__init__(self,conf) self.name = '' self.custom_code = '' self.code = 'int main(){return 0;}\n' self.define = '' # HAVE_something self.path = [] self.uselib = '' self.remove_dot_h = False def error(self): errmsg = 'framework %s cannot be found via compiler, try pass -F' % self.name if self.message: errmsg += '\n%s' % self.message fatal(errmsg) def validate(self): if not self.uselib: self.uselib = self.name.upper() if not self.define: self.define = 'HAVE_'+self.uselib if not self.code: self.code = "#include <%s>\nint main(){return 0;}\n" if not self.uselib: self.uselib = self.name.upper() def run_cache(self, retval): self.conf.check_message('framework %s (cached)' % self.name, '', retval) self.update_env(retval) if retval: self.conf.define(self.define, 1) else: self.conf.undefine(self.define) def run_test(self): oldlkflags = [] oldccflags = [] oldcxxflags = [] oldlkflags += self.env['LINKFLAGS'] oldccflags += self.env['CCFLAGS'] oldcxxflags += self.env['CXXFLAGS'] code = [] if self.remove_dot_h: code.append('#include <%s/%s>\n' % (self.name, self.name)) else: code.append('#include <%s/%s.h>\n' % (self.name, self.name)) code.append('int main(){%s\nreturn 0;}\n' % self.custom_code) linkflags = [] linkflags += ['-framework', self.name] linkflags += ['-F%s' % p for p in self.path] cflags = ['-F%s' % p for p in self.path] myenv = self.env.copy() myenv['LINKFLAGS'] += linkflags obj = check_data() obj.code = "\n".join(code) obj.env = myenv obj.uselib = self.uselib obj.flags += " ".join (cflags) ret = int(self.conf.run_check(obj)) self.conf.check_message('framework %s' % self.name, '', ret, option='') if ret: self.conf.define(self.define, ret) else: self.conf.undefine(self.define) val = {} if 
ret: val["LINKFLAGS_" + self.uselib] = linkflags val["CCFLAGS_" + self.uselib] = cflags val["CXXFLAGS_" + self.uselib] = cflags val[self.define] = ret self.env['LINKFLAGS'] = oldlkflags self.env['CCFLAGS'] = oldccflags self.env['CXXFLAGS'] = oldcxxflags self.update_env(val) return val wrap(framework_configurator) class header_configurator(configurator_base): def __init__(self, conf): configurator_base.__init__(self,conf) self.name = '' self.path = [] self.header_code = '' self.custom_code = '' self.code = 'int main() {return 0;}\n' self.define = '' # HAVE_something self.libs = [] self.lib_paths = [] self.uselib = '' def error(self): errmsg = 'header %s cannot be found via compiler' % self.name if self.message: errmsg += '\n%s' % self.message fatal(errmsg) def validate(self): # self.names = self.names.split() if not self.define: if self.name: self.define = 'HAVE_'+ Utils.quote_define_name(self.name) elif self.uselib: self.define = 'HAVE_'+self.uselib if not self.code: self.code = "#include <%s>\nint main(){return 0;}\n" if not self.define: fatal('no define given') def run_cache(self, retvalue): self.conf.check_message('header %s (cached)' % self.name, '', retvalue) if retvalue: self.update_env(retvalue) self.conf.define(self.define, 1) else: self.conf.undefine(self.define) def run_test(self): ret = {} # not found oldlibpath = self.env['LIBPATH'] oldlib = self.env['LIB'] # try the enumerator to find the correct includepath if self.uselib: test = self.conf.create_header_enumerator() test.name = self.name test.want_message = 0 test.path = self.path test.env = self.env ret = test.run() if ret: self.env['CPPPATH_'+self.uselib] = ret code = [] code.append(self.header_code) code.append('\n') code.append('#include <%s>\n' % self.name) code.append('int main(){%s\nreturn 0;}\n' % self.custom_code) self.env['LIB'] = self.libs self.env['LIBPATH'] = self.lib_paths obj = check_data() obj.code = "\n".join(code) obj.includes = self.path obj.env = self.env obj.uselib = self.uselib 
ret = int(self.conf.run_check(obj)) self.conf.check_message('header %s' % self.name, '', ret, option='') if ret: self.conf.define(self.define, ret) else: self.conf.undefine(self.define) self.env['LIB'] = oldlib self.env['LIBPATH'] = oldlibpath val = {} if ret: val['CPPPATH_'+self.uselib] = self.env['CPPPATH_'+self.uselib] val[self.define] = ret if not ret: return {} return val wrap(header_configurator) class common_include_configurator(header_enumerator): """Looks for a given header. If found, it will be written later by write_config_header() Forced include files are headers that are being used by all source files. One can include files this way using gcc '-include file.h' or msvc '/fi file.h'. The alternative suggested here (common includes) is: Make all files include 'config.h', then add these forced-included headers to config.h (good for compilers that don't have have this feature and for further flexibility). """ def run_test(self): # if a header was found, header_enumerator returns its directory. header_dir = header_enumerator.run_test(self) if header_dir: # if the header was found, add its path to set of forced_include files # to be using later in write_config_header() header_path = os.path.join(header_dir, self.name) # if this header was not stored already, add it to the list of common headers. 
self.env.append_unique(COMMON_INCLUDES, header_path) # the return value of all enumerators is checked by enumerator_base.run() return header_dir wrap(common_include_configurator) # CONFIGURATORS END #################### class check_data(object): def __init__(self): self.env = '' # environment to use self.code = '' # the code to execute self.flags = '' # the flags to give to the compiler self.uselib = '' # uselib self.includes = '' # include paths self.function_name = '' # function to check for self.lib = [] self.libpath = [] # libpath for linking self.define = '' # define to add if run is successful self.header_name = '' # header name to check for self.execute = 0 # execute the program produced and return its output self.options = '' # command-line options self.force_compiler= None self.build_type = 'program' setattr(Configure, 'check_data', check_data) # warning, attached to the module def define(self, define, value): """store a single define and its state into an internal list for later writing to a config header file. Value can only be a string or int; other types not supported. 
String values will appear properly quoted in the generated header file.""" assert define and isinstance(define, str) tbl = self.env[DEFINES] if not tbl: tbl = {} # the user forgot to tell if the value is quoted or not if isinstance(value, str): tbl[define] = '"%s"' % str(value) elif isinstance(value, int): tbl[define] = value else: raise TypeError # add later to make reconfiguring faster self.env[DEFINES] = tbl self.env[define] = value setattr(Configure.Configure, "define", define) def undefine(self, define): """store a single define and its state into an internal list for later writing to a config header file""" assert define and isinstance(define, str) tbl = self.env[DEFINES] if not tbl: tbl = {} value = UNDEFINED tbl[define] = value # add later to make reconfiguring faster self.env[DEFINES] = tbl self.env[define] = value setattr(Configure.Configure, "undefine", undefine) def define_cond(self, name, value): """Conditionally define a name. Formally equivalent to: if value: define(name, 1) else: undefine(name)""" if value: self.define(name, 1) else: self.undefine(name) setattr(Configure.Configure, "define_cond", define_cond) def is_defined(self, define): defines = self.env[DEFINES] if not defines: return False try: value = defines[define] except KeyError: return False else: return (value is not UNDEFINED) setattr(Configure.Configure, "is_defined", is_defined) def get_define(self, define): "get the value of a previously stored define" try: return self.env[DEFINES][define] except KeyError: return None setattr(Configure.Configure, "get_define", get_define) def write_config_header(self, configfile='config.h', env=''): "save the defines into a file" if configfile == '': configfile = self.configheader lst=Utils.split_path(configfile) base = lst[:-1] if not env: env = self.env base = [self.m_blddir, env.variant()]+base dir = os.path.join(*base) if not os.path.exists(dir): os.makedirs(dir) dir = os.path.join(dir, lst[-1]) # remember config files - do not remove them on 
"waf clean" self.env.append_value('waf_config_files', os.path.abspath(dir)) inclusion_guard_name = '_%s_WAF' % Utils.quote_define_name(configfile) dest = open(dir, 'w') dest.write('/* Configuration header created by Waf - do not edit */\n') dest.write('#ifndef %s\n#define %s\n\n' % (inclusion_guard_name, inclusion_guard_name)) # yes, this is special if not configfile in self.env['dep_files']: self.env['dep_files'] += [configfile] if not env[DEFINES]: env[DEFINES]={'missing':'"code"'} for key, value in env[DEFINES].iteritems(): if value is None: dest.write('#define %s\n' % key) elif value is UNDEFINED: dest.write('/* #undef %s */\n' % key) else: dest.write('#define %s %s\n' % (key, value)) # Adds common-includes to config header. Should come after defines, # so they will be defined for the common include files too. for include_file in self.env[COMMON_INCLUDES]: dest.write('\n#include "%s"' % include_file) dest.write('\n#endif /* %s */\n' % (inclusion_guard_name,)) dest.close() setattr(Configure.Configure, "write_config_header", write_config_header) def set_config_header(self, header): "set a config header file" self.configheader = header setattr(Configure.Configure, "set_config_header", set_config_header) def run_check(self, obj): """compile, link and run if necessary @param obj: data of type check_data @return: (False if a error during build happens) or ( (True if build ok) or (a {'result': ''} if execute was set)) """ # first make sure the code to execute is defined if not obj.code: raise ConfigurationError('run_check: no code to process in check') # create a small folder for testing dir = os.path.join(self.m_blddir, '.wscript-trybuild') # if the folder already exists, remove it for (root, dirs, filenames) in os.walk(dir): for f in list(filenames): os.remove(os.path.join(root, f)) bdir = os.path.join( dir, '_testbuild_') if (not obj.force_compiler and Action.g_actions.get('cpp', None)) or obj.force_compiler == "cpp": tp = 'cpp' test_f_name = 'test.cpp' else: tp = 
'cc' test_f_name = 'test.c' # FIXME: by default the following lines are called more than once # we have to make sure they get called only once if not os.path.exists(dir): os.makedirs(dir) if not os.path.exists(bdir): os.makedirs(bdir) if obj.env: env = obj.env else: env = self.env.copy() dest=open(os.path.join(dir, test_f_name), 'w') dest.write(obj.code) dest.close() # very important Utils.reset() back=os.path.abspath('.') bld = Build.Build() bld.m_allenvs.update(self.m_allenvs) bld.m_allenvs['default'] = env bld._variants=bld.m_allenvs.keys() bld.load_dirs(dir, bdir, isconfigure=1) os.chdir(dir) # not sure yet when to call this: #bld.rescan(bld.m_srcnode) o = Object.task_gen.classes[tp](obj.build_type) o.source = test_f_name o.target = 'testprog' o.uselib = obj.uselib o.cppflags = obj.flags o.includes = obj.includes # compile the program self.mute_logging() try: ret = bld.compile() except Build.BuildError: ret = 1 self.restore_logging() # keep the name of the program to execute if obj.execute: lastprog = o.link_task.m_outputs[0].abspath(o.env) #if runopts is not None: # ret = os.popen(obj.link_task.m_outputs[0].abspath(obj.env)).read().strip() os.chdir(back) Utils.reset() # if we need to run the program, try to get its result if obj.execute: if ret: return not ret data = os.popen('"%s"' %lastprog).read().strip() ret = {'result': data} return ret return not ret setattr(Configure.Configure, "run_check", run_check) # TODO OBSOLETE remove for waf 1.4 def add_define(self, define, value, quote=-1, comment=''): fatal("DEPRECATED use conf.define() / conf.undefine() / conf.define_cond() instead") setattr(Configure.Configure, "add_define", add_define) def check_features(self, kind='cc'): v = self.env # check for compiler features: programs, shared and static libraries test = Configure.check_data() test.code = 'int main() {return 0;}\n' test.env = v test.execute = 1 test.force_compiler = kind ret = self.run_check(test) self.check_message('compiler could create', 'programs', 
not (ret is False)) if not ret: self.fatal("no programs") lib_obj = Configure.check_data() lib_obj.code = "int k = 3;\n" lib_obj.env = v lib_obj.build_type = "shlib" lib_obj.force_compiler = kind ret = self.run_check(lib_obj) self.check_message('compiler could create', 'shared libs', not (ret is False)) if not ret: self.fatal("no shared libs") lib_obj = Configure.check_data() lib_obj.code = "int k = 3;\n" lib_obj.env = v lib_obj.build_type = "staticlib" lib_obj.force_compiler = kind ret = self.run_check(lib_obj) self.check_message('compiler could create', 'static libs', not (ret is False)) if not ret: self.fatal("no static libs") setattr(Configure.Configure, "check_features", check_features) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Tools/d.py0000664000175000017500000003175410772264157021601 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # Carlos Rafael Giani, 2007 (dv) # Thomas Nagy, 2007-2008 (ita) import os, sys, re, optparse import ccroot # <- leave this import Object, Utils, Action, Params, checks, Configure, Scan from Params import debug, error from Object import taskgen, feature, after, before, extension EXT_D = ['.d', '.di', '.D'] D_METHS = ['apply_core', 'apply_vnum', 'apply_objdeps', 'install_target'] # additional d methods def filter_comments(filename): f = open(filename, 'r') txt = f.read() f.close() buf = [] i = 0 max = len(txt) while i < max: c = txt[i] # skip a string if c == '"': i += 1 c = '' while i < max: p = c c = txt[i] i += 1 if i == max: return buf if c == '"': cnt = 0 while i < cnt and i < max: #print "cntcnt = ", str(cnt), self.txt[self.i-2-cnt] if txt[i-2-cnt] == '\\': cnt+=1 else: break #print "cnt is ", str(cnt) if (cnt%2)==0: break # i -= 1 # <- useless in practice # skip a char elif c == "'": i += 1 if i == max: return buf c = txt[i] if c == '\\': i += 1 if i == max: return buf c = txt[i] if c == 'x': i += 2 # skip two chars i += 1 if i == max: return buf c = txt[i] if c != 
'\'': print "uh-oh, invalid character" # skip a comment elif c == '/': if i == max: break c = txt[i+1] # eat /+ +/ comments if c == '+': i += 1 nesting = 1 prev = 0 while i < max: c = txt[i] if c == '+': prev = 1 elif c == '/': if prev: nesting -= 1 if nesting == 0: break else: if i < max: i += 1 c = txt[i] if c == '+': nesting += 1 else: return buf else: prev = 0 i += 1 # eat /* */ comments elif c == '*': i += 1 while i < max: c = txt[i] if c == '*': prev = 1 elif c == '/': if prev: break else: prev = 0 i += 1 # eat // comments elif c == '/': i += 1 c = txt[i] while i < max and c != '\n': i += 1 c = txt[i] # a valid char, add it to the buffer else: buf.append(c) i += 1 return buf class d_parser(object): def __init__(self, env, incpaths): #self.code = '' #self.module = '' #self.imports = [] self.allnames = [] self.re_module = re.compile("module\s+([^;]+)") self.re_import = re.compile("import\s+([^;]+)") self.re_import_bindings = re.compile("([^:]+):(.*)") self.re_import_alias = re.compile("[^=]+=(.+)") self.env = env self.m_nodes = [] self.m_names = [] self.incpaths = incpaths def tryfind(self, filename): found = 0 for n in self.incpaths: found = n.find_source(filename.replace('.', '/')+'.d', create=0) if found: self.m_nodes.append(found) self.waiting.append(found) break if not found: if not filename in self.m_names: self.m_names.append(filename) def get_strings(self, code): #self.imports = [] self.module = '' lst = [] # get the module name (if present) mod_name = self.re_module.search(code) if mod_name: self.module = re.sub('\s+', '', mod_name.group(1)) # strip all whitespaces # go through the code, have a look at all import occurrences # first, lets look at anything beginning with "import" and ending with ";" import_iterator = self.re_import.finditer(code) if import_iterator: for import_match in import_iterator: import_match_str = re.sub('\s+', '', import_match.group(1)) # strip all whitespaces # does this end with an import bindings declaration? 
# (import bindings always terminate the list of imports) bindings_match = self.re_import_bindings.match(import_match_str) if bindings_match: import_match_str = bindings_match.group(1) # if so, extract the part before the ":" (since the module declaration(s) is/are located there) # split the matching string into a bunch of strings, separated by a comma matches = import_match_str.split(',') for match in matches: alias_match = self.re_import_alias.match(match) if alias_match: # is this an alias declaration? (alias = module name) if so, extract the module name match = alias_match.group(1) lst.append(match) return lst def start(self, node): self.waiting = [node] # while the stack is not empty, add the dependencies while self.waiting: nd = self.waiting.pop(0) self.iter(nd) def iter(self, node): path = node.abspath(self.env) # obtain the absolute path code = "".join(filter_comments(path)) # read the file and filter the comments names = self.get_strings(code) # obtain the import strings for x in names: # optimization if x in self.allnames: continue self.allnames.append(x) # for each name, see if it is like a node or not self.tryfind(x) class d_scanner(Scan.scanner): "scanner for d files" def __init__(self): Scan.scanner.__init__(self) def scan(self, task, node): "look for .d/.di the .d source need" debug("_scan_preprocessor(self, node, env, path_lst)", 'ccroot') gruik = d_parser(task.env(), task.path_lst) gruik.start(node) if Params.g_verbose: debug("nodes found for %s: %s %s" % (str(node), str(gruik.m_nodes), str(gruik.m_names)), 'deps') #debug("deps found for %s: %s" % (str(node), str(gruik.deps)), 'deps') return (gruik.m_nodes, gruik.m_names) g_d_scanner = d_scanner() "scanner for d programs" def get_target_name(self): "for d programs and libs" v = self.env return v['D_%s_PATTERN' % self.m_type] % self.target class d_taskgen(Object.task_gen): def __init__(self, type='program'): Object.task_gen.__init__(self) self.m_type = type self.subtype = type self.dflags = 
{'gdc':'', 'dmd':''} self.importpaths = '' self.libs = '' self.libpaths = '' self.uselib = '' self.uselib_local = '' self.inc_paths = [] self.compiled_tasks = [] self.add_objects = [] self.features.append('d') self.inst_var = '' # mark as installable TODO self.vnum = '1.0.0' Object.add_feature('d', D_METHS) def apply_d_libs(self): uselib = self.to_list(self.uselib) seen = [] local_libs = self.to_list(self.uselib_local) libs = [] libpaths = [] env = self.env while local_libs: x = local_libs.pop() # visit dependencies only once if x in seen: continue else: seen.append(x) # object does not exist ? y = Object.name_to_obj(x) if not y: fatal('object not found in uselib_local: obj %s uselib %s' % (self.name, x)) # object has ancestors to process first ? update the list of names if y.uselib_local: added = 0 lst = y.to_list(y.uselib_local) lst.reverse() for u in lst: if u in seen: continue added = 1 local_libs = [u]+local_libs if added: continue # list of names modified, loop # safe to process the current object if not y.m_posted: y.post() seen.append(x) if y.m_type == 'shlib' or y.m_type == 'staticlib': libs.append(y.target) elif y.m_type == 'objects': pass else: error('%s has unknown object type %s, in apply_d_lib_vars, uselib_local.' % (y.name, y.m_type)) # add the link path too tmp_path = y.path.bldpath(env) if not tmp_path in libpaths: libpaths = [tmp_path] + libpaths # set the dependency over the link task if y.link_task is not None: self.link_task.set_run_after(y.link_task) dep_nodes = getattr(self.link_task, 'dep_nodes', []) self.link_task.dep_nodes = dep_nodes + y.link_task.m_outputs # add ancestors uselib too # TODO potential problems with static libraries ? 
morelibs = y.to_list(y.uselib) for v in morelibs: if v in uselib: continue uselib = [v]+uselib self.uselib = uselib def apply_d_link(self): # if we are only building .o files, tell which ones we build if self.m_type == 'objects': self.out_nodes = [] app = self.out_nodes.append for t in self.compiled_tasks: app(t.m_outputs[0]) return if self.m_type=='staticlib': linktask = self.create_task('ar_link_static', self.env) else: linktask = self.create_task('d_link', self.env) outputs = [] app = outputs.append for t in self.compiled_tasks: app(t.m_outputs[0]) linktask.set_inputs(outputs) linktask.set_outputs(self.path.find_build(get_target_name(self))) self.link_task = linktask def apply_d_vars(self): env = self.env dpath_st = env['DPATH_ST'] lib_st = env['DLIB_ST'] libpath_st = env['DLIBPATH_ST'] dflags = {'gdc':[], 'dmd':[]} importpaths = self.to_list(self.importpaths) libpaths = [] libs = [] uselib = self.to_list(self.uselib) # add compiler flags for i in uselib: if env['DFLAGS_' + i]: for dflag in self.to_list(env['DFLAGS_' + i][env['COMPILER_D']]): if not dflag in dflags[env['COMPILER_D']]: dflags[env['COMPILER_D']] += [dflag] dflags[env['COMPILER_D']] = self.to_list(self.dflags[env['COMPILER_D']]) + dflags[env['COMPILER_D']] for dflag in dflags[env['COMPILER_D']]: if not dflag in env['DFLAGS'][env['COMPILER_D']]: env['DFLAGS'][env['COMPILER_D']] += [dflag] d_shlib_dflags = env['D_' + self.m_type + '_DFLAGS'] if d_shlib_dflags: for dflag in d_shlib_dflags: if not dflag in env['DFLAGS'][env['COMPILER_D']]: env['DFLAGS'][env['COMPILER_D']] += [dflag] env['_DFLAGS'] = env['DFLAGS'][env['COMPILER_D']] # add import paths for i in uselib: if env['DPATH_' + i]: for entry in self.to_list(env['DPATH_' + i]): if not entry in importpaths: importpaths.append(entry) # now process the import paths for path in importpaths: if os.path.isabs(path): env.append_unique('_DIMPORTFLAGS', dpath_st % path) else: node = self.path.find_source_lst(Utils.split_path(path)) 
self.inc_paths.append(node) env.append_unique('_DIMPORTFLAGS', dpath_st % node.srcpath(env)) env.append_unique('_DIMPORTFLAGS', dpath_st % node.bldpath(env)) # add library paths for i in uselib: if env['LIBPATH_' + i]: for entry in self.to_list(env['LIBPATH_' + i]): if not entry in libpaths: libpaths += [entry] libpaths = self.to_list(self.libpaths) + libpaths # now process the library paths for path in libpaths: env.append_unique('_DLIBDIRFLAGS', libpath_st % path) # add libraries for i in uselib: if env['LIB_' + i]: for entry in self.to_list(env['LIB_' + i]): if not entry in libs: libs += [entry] libs = libs + self.to_list(self.libs) # now process the libraries for lib in libs: env.append_unique('_DLIBFLAGS', lib_st % lib) # add linker flags for i in uselib: dlinkflags = env['DLINKFLAGS_' + i] if dlinkflags: for linkflag in dlinkflags: env.append_unique('DLINKFLAGS', linkflag) d_shlib_linkflags = env['D_' + self.m_type + '_LINKFLAGS'] if d_shlib_linkflags: for linkflag in d_shlib_linkflags: env.append_unique('DLINKFLAGS', linkflag) def d_hook(self, node): # create the compilation task: cpp or cc task = self.create_task('d', self.env) try: obj_ext = self.obj_ext except AttributeError: obj_ext = '_%s.o' % self.m_type[:2] global g_d_scanner task.m_scanner = g_d_scanner task.path_lst = self.inc_paths #task.defines = self.scanner_defines task.m_inputs = [node] task.m_outputs = [node.change_ext(obj_ext)] self.compiled_tasks.append(task) d_str = '${D_COMPILER} ${_DFLAGS} ${_DIMPORTFLAGS} ${D_SRC_F}${SRC} ${D_TGT_F}${TGT}' link_str = '${D_LINKER} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F}${TGT} ${DLINKFLAGS} ${_DLIBDIRFLAGS} ${_DLIBFLAGS}' Action.simple_action('d', d_str, 'GREEN', prio=100) Action.simple_action('d_link', link_str, color='YELLOW', prio=101) # for feature request #104 def generate_header(self, filename, inst_var, inst_dir): if not hasattr(self, 'header_lst'): self.header_lst = [] self.meths.add('process_header') self.header_lst.append([filename, inst_var, 
inst_dir]) def process_header(self): env = self.env for i in getattr(self, 'header_lst', []): node = self.path.find_source(i[0]) if not node: fatal('file not found on d obj '+i[0]) task = self.create_task('d_header', env, 2) task.set_inputs(node) task.set_outputs(node.change_ext('.di')) d_header_str = '${D_COMPILER} ${D_HEADER} ${SRC}' Action.simple_action('d_header', d_header_str, color='BLUE', prio=80) # quick test # if __name__ == "__main__": #Params.g_verbose = 2 #Params.g_zones = ['preproc'] #class dum: # def __init__(self): # self.parse_cache_d = {} #Params.g_build = dum() try: arg = sys.argv[1] except IndexError: arg = "file.d" print "".join(filter_comments(arg)) # TODO paths = ['.'] #gruik = filter() #gruik.start(arg) #code = "".join(gruik.buf) #print "we have found the following code" #print code #print "now parsing" #print "-------------------------------------------" """ parser_ = d_parser() parser_.start(arg) print "module: %s" % parser_.module print "imports: ", for imp in parser_.imports: print imp + " ", print """ taskgen(apply_d_libs) feature('d')(apply_d_libs) after('apply_d_link')(apply_d_libs) before('apply_vnum')(apply_d_libs) taskgen(apply_d_link) feature('d')(apply_d_link) after('apply_core')(apply_d_link) taskgen(apply_d_vars) feature('d')(apply_d_vars) after('apply_core')(apply_d_vars) extension(EXT_D)(d_hook) taskgen(generate_header) taskgen(process_header) before('apply_core')(process_header) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Common.py0000664000175000017500000001070410771525014021465 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! 
/usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2005 (ita) "Important functions: install_files, install_as, symlink_as (destdir is taken into account)" import os, types, shutil, glob import Params, Utils from Params import error, fatal class InstallError(Exception): pass def check_dir(dir): #print "check dir ", dir try: os.stat(dir) except OSError: try: os.makedirs(dir) except OSError: fatal("Cannot create folder " + dir) def do_install(src, tgt, chmod=0644): if Params.g_commands['install']: # check if the file is already there to avoid a copy _do_install = 1 if not Params.g_options.force: try: t1 = os.stat(tgt).st_mtime t2 = os.stat(src).st_mtime if t1 >= t2: _do_install = 0 except OSError: _do_install = 1 if _do_install: srclbl = src try: srclbl = src.replace(Params.g_build.m_bldnode.abspath(None)+os.sep, '') srclbl = src.replace(Params.g_build.m_srcnode.abspath(None)+os.sep, '') except OSError: pass print "* installing %s as %s" % (srclbl, tgt) # followig is for shared libs and stale inodes try: os.remove(tgt) except OSError: pass try: shutil.copy2(src, tgt) os.chmod(tgt, chmod) except IOError: try: os.stat(src) except IOError: error('file %s does not exist' % str(src)) fatal('Could not install the file %s' % str(tgt)) elif Params.g_commands['uninstall']: print "* uninstalling %s" % tgt Params.g_build.m_uninstall.append(tgt) try: os.remove(tgt) except OSError: pass def path_install(var, subdir, env=None): bld = Params.g_build if not env: env=Params.g_build.m_allenvs['default'] destpath = env[var] if not destpath: error("Installing: to set a destination folder use env['%s']" % (var)) destpath = var destdir = env.get_destdir() if destdir: destpath = os.path.join(destdir, destpath.lstrip(os.sep)) if subdir: destpath = os.path.join(destpath, subdir.lstrip(os.sep)) return destpath def install_files(var, subdir, files, env=None, chmod=0644): if not Params.g_install: return if not var: return bld = Params.g_build if not env: env = bld.m_allenvs['default'] destpath 
= env[var] # the variable can be an empty string and the subdir an absolute path if destpath is [] and subdir: return node = bld.m_curdirnode if type(files) is types.StringType: if '*' in files: gl = node.abspath()+os.sep+files lst = glob.glob(gl) else: lst = files.split() else: lst = files destdir = env.get_destdir() if destdir: destpath = os.path.join(destdir, destpath.lstrip(os.sep)) if subdir: destpath = os.path.join(destpath, subdir.lstrip(os.sep)) check_dir(destpath) # copy the files to the final destination for filename in lst: if not os.path.isabs(filename): alst = Utils.split_path(filename) filenode = node.find_build_lst(alst, create=1) file = filenode.abspath(env) destfile = os.path.join(destpath, filenode.m_name) else: file = filename alst = Utils.split_path(filename) destfile = os.path.join(destpath, alst[-1]) do_install(file, destfile, chmod=chmod) def install_as(var, destfile, srcfile, env=None, chmod=0644): if not Params.g_install: return if var == 0: return bld = Params.g_build if not env: env=Params.g_build.m_allenvs['default'] node = bld.m_curdirnode tgt = env[var] destdir = env.get_destdir() if destdir: tgt = os.path.join(destdir, tgt.lstrip(os.sep)) tgt = os.path.join(tgt, destfile.lstrip(os.sep)) dir, name = os.path.split(tgt) check_dir(dir) # the source path if not os.path.isabs(srcfile): alst = Utils.split_path(srcfile) filenode = node.find_build_lst(alst, create=1) src = filenode.abspath(env) else: src = srcfile do_install(src, tgt, chmod=chmod) def symlink_as(var, src, dest, env=None): if not Params.g_install: return if var == 0: return bld = Params.g_build if not env: env=Params.g_build.m_allenvs['default'] node = bld.m_curdirnode tgt = env[var] destdir = env.get_destdir() if destdir: tgt = os.path.join(destdir, tgt.lstrip(os.sep)) tgt = os.path.join(tgt, dest.lstrip(os.sep)) dir, name = os.path.split(tgt) check_dir(dir) if Params.g_commands['install']: try: if not os.path.islink(tgt) or os.readlink(tgt) != src: print "* symlink %s (-> 
%s)" % (tgt, src) os.symlink(src, tgt) return 0 except OSError: return 1 elif Params.g_commands['uninstall']: try: print "* removing %s" % (tgt) os.remove(tgt) return 0 except OSError: return 1 lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Scripting.py0000664000175000017500000004135410772251134022204 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 import sys if sys.hexversion < 0x020400f0: from sets import Set as set #! /usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2005 (ita) "Module called for configuring, compiling and installing targets" import os, sys, cPickle, traceback import Params, Utils, Configure, Build, Runner, Options import shutil from Params import error, fatal, warning, g_lockfile from Constants import * g_gz='bz2' g_dirwatch = None g_daemonlock = 0 g_excludes = '.svn CVS .arch-ids {arch} SCCS BitKeeper .hg'.split() "exclude folders from dist" g_dist_exts = '~ .rej .orig .pyc .pyo .bak config.log .tar.bz2 .zip Makefile Makefile.in'.split() "exclude files from dist" g_distclean_exts = '~ .pyc .wafpickle'.split() def add_subdir(dir, bld): "each wscript calls bld.add_subdir" try: bld.rescan(bld.m_curdirnode) except OSError: fatal("No such directory "+bld.m_curdirnode.abspath()) old = bld.m_curdirnode new = bld.m_curdirnode.ensure_node_from_lst(Utils.split_path(dir)) if new is None: fatal("subdir not found (%s), restore is %s" % (dir, bld.m_curdirnode)) bld.m_curdirnode=new # try to open 'wscript_build' for execution # if unavailable, open the module wscript and call the build function from it from Common import install_files, install_as, symlink_as # do not remove try: file_path = os.path.join(new.abspath(), WSCRIPT_BUILD_FILE) file = open(file_path, 'r') exec file if file: file.close() except IOError: file_path = os.path.join(new.abspath(), WSCRIPT_FILE) module = Utils.load_module(file_path) module.build(bld) # restore the old node position bld.m_curdirnode=old # #node = bld.m_curdirnode.ensure_node_from_lst(Utils.split_path(dir)) 
#if node is None: # fatal("subdir not found (%s), restore is %s" % (dir, bld.m_curdirnode)) #bld.m_subdirs = [[node, bld.m_curdirnode]] + bld.m_subdirs def call_back(idxName, pathName, event): #print "idxName=%s, Path=%s, Event=%s "%(idxName, pathName, event) # check the daemon lock state global g_daemonlock if g_daemonlock: return g_daemonlock = 1 # clean up existing variables, and start a new instance Utils.reset() main() g_daemonlock = 0 def start_daemon(): "if it does not exist already:start a new directory watcher; else: return immediately" global g_dirwatch if not g_dirwatch: import DirWatch g_dirwatch = DirWatch.DirectoryWatcher() m_dirs=[] for nodeDir in Params.g_build.m_srcnode.dirs(): tmpstr = "%s" %nodeDir tmpstr = "%s" %(tmpstr[3:])[:-1] m_dirs.append(tmpstr) g_dirwatch.add_watch("tmp Test", call_back, m_dirs) # infinite loop, no need to exit except on ctrl+c g_dirwatch.loop() g_dirwatch = None else: g_dirwatch.suspend_all_watch() m_dirs=[] for nodeDir in Params.g_build.m_srcnode.dirs(): tmpstr = "%s" %nodeDir tmpstr = "%s" %(tmpstr[3:])[:-1] m_dirs.append(tmpstr) g_dirwatch.add_watch("tmp Test", call_back, m_dirs) def configure(): # disable parallelization while configuring jobs_save = Params.g_options.jobs Params.g_options.jobs = 1 Runner.set_exec('normal') tree = Build.Build() err = 'The %s is not given in %s:\n * define a top level attribute named "%s"\n * run waf configure --%s=xxx' src = getattr(Params.g_options, SRCDIR, None) if not src: src = getattr(Utils.g_module, SRCDIR, None) if not src: fatal(err % (SRCDIR, os.path.abspath('.'), SRCDIR, SRCDIR)) bld = getattr(Params.g_options, BLDDIR, None) if not bld: bld = getattr(Utils.g_module, BLDDIR, None) if not bld: fatal(err % (BLDDIR, os.path.abspath('.'), BLDDIR, BLDDIR)) Params.g_cachedir = os.path.join(bld, CACHE_DIR) tree.load_dirs(src, bld, isconfigure=1) conf = Configure.Configure(srcdir=src, blddir=bld) try: # calling to main wscript's configure() conf.sub_config('') except 
Configure.ConfigurationError, e: fatal(str(e), 2) except Exception: Utils.test_full() raise conf.store(tree) conf.cleanup() # this will write a configure lock so that subsequent run will # consider the current path as the root directory, to remove: use 'waf distclean' file = open(g_lockfile, 'w') file.write proj = {} proj[BLDDIR] = bld proj[SRCDIR] = src proj['argv'] = sys.argv[1:] proj['hash'] = conf.hash proj['files'] = conf.files cPickle.dump(proj, file) file.close() # restore -j option Params.g_options.jobs = jobs_save def read_cache_file(filename): file = open(g_lockfile, 'r') proj = cPickle.load(file) file.close() return proj def prepare(): # some command-line options can be processed immediately if '--version' in sys.argv: opt_obj = Options.Handler() opt_obj.parse_args() sys.exit(0) # now find the wscript file msg1 = 'Waf: *** Nothing to do! Please run waf from a directory containing a file named "%s"' % WSCRIPT_FILE # Some people want to configure their projects gcc-style: # mkdir build && cd build && ../waf configure && ../waf # check that this is really what is wanted build_dir_override = None candidate = None cwd = Params.g_cwd_launch lst = os.listdir(cwd) xml = 0 #check if a wscript or a wscript_xml file is in current directory if WSCRIPT_FILE in lst or WSCRIPT_BUILD_FILE in lst or 'wscript_xml' in lst: # if a script is in current directory, use this directory as candidate (and prevent gcc-like configuration) candidate = cwd elif 'configure' in sys.argv: # gcc-like configuration build_dir_override = cwd try: #check the following dirs for wscript or wscript_xml search_for_candidate = True if not candidate: #check first the calldir if there is wscript or wscript_xml #for example: /usr/src/configure the calldir would be /usr/src calldir = os.path.abspath(os.path.dirname(sys.argv[0])) lst_calldir = os.listdir(calldir) if WSCRIPT_FILE in lst_calldir: candidate = calldir search_for_candidate = False if 'wscript_xml' in lst_calldir: candidate = calldir xml = 1 
search_for_candidate = False if "--make-waf" in sys.argv and candidate: search_for_candidate = False #check all directories above current dir for wscript or wscript_xml if still not found while search_for_candidate: if len(cwd) <= 3: break # stop at / or c: dirlst = os.listdir(cwd) if WSCRIPT_FILE in dirlst: candidate = cwd xml = 0 if 'wscript_xml' in dirlst: candidate = cwd xml = 1 break if 'configure' in sys.argv and candidate: break if Params.g_lockfile in dirlst: break cwd = cwd[:cwd.rfind(os.sep)] # climb up except Exception: traceback.print_stack() fatal(msg1) if not candidate: # check if the user only wanted to display the help if '-h' in sys.argv or '--help' in sys.argv: warning('No wscript file found: the help message may be incomplete') opt_obj = Options.Handler() opt_obj.parse_args() sys.exit(0) else: fatal(msg1) # We have found wscript, but there is no guarantee that it is valid os.chdir(candidate) if xml: # the xml module is not provided by default, you will have to import it yourself from XMLScripting import compile compile(candidate+os.sep+'wscript_xml') else: # define the main module containing the functions init, shutdown, .. Utils.set_main_module(os.path.join(candidate, WSCRIPT_FILE)) if build_dir_override: d = getattr(Utils.g_module, BLDDIR, None) if d: # test if user has set the blddir in wscript. 
msg = 'Overriding build directory %s with %s' % (d, build_dir_override) Params.niceprint(msg, 'WARNING', 'waf') Utils.g_module.blddir = build_dir_override # fetch the custom command-line options recursively and in a procedural way opt_obj = Options.Handler() # will call to main wscript's set_options() opt_obj.sub_options('') opt_obj.parse_args() # use the parser results if Params.g_commands['dist']: # try to use the user-defined dist function first, fallback to the waf scheme fun = getattr(Utils.g_module, 'dist', None) if fun: fun(); sys.exit(0) appname = getattr(Utils.g_module, APPNAME, 'noname') get_version = getattr(Utils.g_module, 'get_version', None) if get_version: version = get_version() else: version = getattr(Utils.g_module, VERSION, None) if not version: version = '1.0' from Scripting import Dist Dist(appname, version) sys.exit(0) elif Params.g_commands['distclean']: # try to use the user-defined distclean first, fallback to the waf scheme fun = getattr(Utils.g_module, 'distclean', None) if fun: fun() else: DistClean() sys.exit(0) elif Params.g_commands['distcheck']: # try to use the user-defined dist function first, fallback to the waf scheme fun = getattr(Utils.g_module, 'dist', None) if fun: fun(); sys.exit(0) appname = getattr(Utils.g_module, APPNAME, 'noname') get_version = getattr(Utils.g_module, 'get_version', None) if get_version: version = get_version() else: version = getattr(Utils.g_module, VERSION, None) if not version: version = '1.0' DistCheck(appname, version) sys.exit(0) fun=getattr(Utils.g_module, 'init', None) if fun: fun() main() def main(): import inspect if Params.g_commands['configure']: configure() Params.pprint('GREEN', 'Configuration finished successfully; project is now ready to build.') sys.exit(0) Runner.set_exec('noredir') # compile the project and/or install the files bld = Build.Build() try: proj = read_cache_file(g_lockfile) except IOError: if Params.g_commands['clean']: fatal("Nothing to clean (project not configured)", 
ret=0) else: warning("Run waf configure first (project not configured)") if Params.g_autoconfig: configure() bld = Build.Build() proj = read_cache_file(g_lockfile) else: sys.exit(0) if Params.g_autoconfig: reconf = 0 hash = 0 try: for file in proj['files']: mod = Utils.load_module(file) hash = Params.hash_function_with_globals(hash, mod.configure) reconf = (hash != proj['hash']) except Exception, ex: if Params.g_verbose: traceback.print_exc() warning("Reconfiguring the project (an exception occured: %s)" % (str(ex),)) reconf = 1 if reconf: warning("Reconfiguring the project (the configuration has changed)") a1 = Params.g_commands a2 = Params.g_options a3 = Params.g_zones a4 = Params.g_verbose Options.g_parser.parse_args(args=proj['argv']) configure() Params.g_commands = a1 Params.g_options = a2 Params.g_zones = a3 Params.g_verbose = a4 bld = Build.Build() proj = read_cache_file(g_lockfile) Params.g_cachedir = os.path.join(proj[BLDDIR], CACHE_DIR) bld.load_dirs(proj[SRCDIR], proj[BLDDIR]) bld.load_envs() try: # calling to main wscript's build() f = Utils.g_module.build except AttributeError: fatal("Could not find the function 'def build(bld):' in wscript") else: f(bld) # TODO undocumented hook pre_build = getattr(Utils.g_module, 'pre_build', None) if pre_build: pre_build() # compile if Params.g_commands['build'] or Params.g_install: try: # TODO quite ugly, no? 
if not Params.g_commands['build'] and not Params.g_commands['install']: import Task def must_run(self): return 0 setattr(Task.Task, 'must_run', must_run) bld.compile() #import cProfile, pstats #cProfile.run("Params.g_build.compile()", 'profi.txt') #p = pstats.Stats('profi.txt') #p.sort_stats('time').print_stats(20) except Build.BuildError, e: if not Params.g_options.daemon: fatal(e.get_message(), 1) else: error(e.get_message()) else: if Params.g_options.progress_bar: print '' if Params.g_commands['install'] or Params.g_commands['uninstall']: bld.install() if Params.g_commands['install']: msg = 'Compilation and installation finished successfully' elif Params.g_commands['uninstall']: msg = 'Uninstallation finished successfully' else: msg = 'Compilation finished successfully' Params.pprint('GREEN', msg) # clean if Params.g_commands['clean']: try: bld.clean() Params.pprint('GREEN', 'Cleaning finished successfully') finally: bld.save() #if ret: # msg='Cleanup failed for a mysterious reason' # error(msg) # daemon look here if Params.g_options.daemon and Params.g_commands['build']: start_daemon() return # shutdown fun = getattr(Utils.g_module, 'shutdown', None) if fun: fun() ## Note: this is a modified version of shutil.copytree from python ## 2.5.2 library; modified for WAF purposes to exclude dot dirs and ## another list of files. def copytree(src, dst, symlinks=False, excludes=(), build_dir=None): names = os.listdir(src) os.makedirs(dst) errors = [] for name in names: srcname = os.path.join(src, name) dstname = os.path.join(dst, name) try: if symlinks and os.path.islink(srcname): linkto = os.readlink(srcname) os.symlink(linkto, dstname) elif os.path.isdir(srcname): if name in excludes: continue elif name.startswith('.') or name.startswith(',,') or name.startswith('++'): continue elif name == build_dir: continue else: ## build_dir is not passed into the recursive ## copytree, but that is intentional; it is a ## directory name valid only at the top level. 
copytree(srcname, dstname, symlinks, excludes) else: ends = name.endswith to_remove = False if name.startswith('.') or name.startswith('++'): to_remove = True else: for x in g_dist_exts: if ends(x): to_remove = True break if not to_remove: shutil.copy2(srcname, dstname) # XXX What about devices, sockets etc.? except (IOError, os.error), why: errors.append((srcname, dstname, str(why))) # catch the Error from the recursive copytree so that we can # continue with other files except shutil.Error, err: errors.extend(err.args[0]) try: shutil.copystat(src, dst) except WindowsError: # can't copy file access times on Windows pass except OSError, why: errors.extend((src, dst, str(why))) if errors: raise shutil.Error, errors def DistDir(appname, version): "make a distribution directory with all the sources in it" # Our temporary folder where to put our files TMPFOLDER=appname+'-'+version # Remove an old package directory if os.path.exists(TMPFOLDER): shutil.rmtree(TMPFOLDER) global g_dist_exts, g_excludes # Remove the Build dir build_dir = getattr(Utils.g_module, BLDDIR, None) # Copy everything into the new folder copytree('.', TMPFOLDER, excludes=g_excludes, build_dir=build_dir) # TODO undocumented hook dist_hook = getattr(Utils.g_module, 'dist_hook', None) if dist_hook: os.chdir(TMPFOLDER) try: dist_hook() finally: # go back to the root directory os.chdir('..') return TMPFOLDER def DistTarball(appname, version): """make a tarball with all the sources in it; return (distdirname, tarballname)""" import tarfile TMPFOLDER = DistDir(appname, version) tar = tarfile.open(TMPFOLDER+'.tar.'+g_gz,'w:'+g_gz) tar.add(TMPFOLDER) tar.close() Params.pprint('GREEN', 'Your archive is ready -> %s.tar.%s' % (TMPFOLDER, g_gz)) if os.path.exists(TMPFOLDER): shutil.rmtree(TMPFOLDER) return (TMPFOLDER, TMPFOLDER+'.tar.'+g_gz) def Dist(appname, version): """make a tarball with all the sources in it""" DistTarball(appname, version) sys.exit(0) def DistClean(): """clean the project""" # remove the 
temporary files # the builddir is given by lock-wscript only # we do no try to remove it if there is no lock file (rmtree) for (root, dirs, filenames) in os.walk('.'): for f in list(filenames): to_remove = 0 if f==g_lockfile: # removes a lock, and the builddir indicated to_remove = True try: proj = read_cache_file(os.path.join(root, f)) shutil.rmtree(os.path.join(root, proj[BLDDIR])) except (OSError, IOError): # ignore errors if the lockfile or the builddir not exist. pass else: ends = f.endswith for x in g_distclean_exts: if ends(x): to_remove = 1 break if to_remove: os.remove(os.path.join(root, f)) lst = os.listdir('.') for f in lst: if f.startswith('.waf-'): shutil.rmtree(f, ignore_errors=True) sys.exit(0) def DistCheck(appname, version): """Makes some sanity checks on the waf dist generated tarball""" import tempfile import pproc as subprocess waf = os.path.abspath(sys.argv[0]) distdir, tarball = DistTarball(appname, version) retval = subprocess.Popen('bzip2 -dc %s | tar x' % tarball, shell=True).wait() if retval: Params.fatal('uncompressing the tarball failed with code %i' % (retval)) instdir = tempfile.mkdtemp('.inst', '%s-%s' % (appname, version)) cwd_before = os.getcwd() os.chdir(distdir) try: retval = subprocess.Popen( '%(waf)s configure && %(waf)s ' '&& %(waf)s check && %(waf)s install --destdir=%(instdir)s' ' && %(waf)s uninstall --destdir=%(instdir)s' % vars(), shell=True).wait() if retval: Params.fatal('distcheck failed with code %i' % (retval)) finally: os.chdir(cwd_before) shutil.rmtree(distdir) if os.path.exists(instdir): Params.fatal("distcheck succeeded, but files were left in %s" % (instdir)) else: Params.pprint('GREEN', "distcheck finished successfully") lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Params.py0000664000175000017500000001334010771525014021457 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! 
/usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2005-2008 (ita) "Main parameters" import os, sys, types, inspect, base64, time try: from hashlib import md5 except ImportError: from md5 import md5 import Constants, Utils # updated from the top-level wscript g_version="1.3.3" g_rootname = '' g_progress = '\x1b[K%s%s%s\r' if sys.platform=='win32': # get the first two letters (c:) g_rootname = os.getcwd()[:2] g_progress = '\x1b[A\x1b[K%s%s%s\r' g_autoconfig = 0 "reconfigure the project automatically" sig_nil = 'iluvcuteoverload' # =================================== # # Constants set on runtime g_cwd_launch = None "directory from which waf was called" g_tooldir='' "Tools directory (used in particular by Environment.py)" g_options = None "Parsed command-line arguments in the options module" g_commands = {} "build, configure, .." g_verbose = 0 "-v: warnings, -vv: developer info, -vvv: all info" g_build = None "only one build object is active at a time" g_platform = sys.platform "current platform" g_cache_global = '' "config cache directory" g_conf_name = 'conf-runs-%s-%d.pickle' % (sys.platform, Constants.ABI) g_install = 0 "true if install or uninstall is set" try: g_cache_global = os.path.abspath(os.environ['WAFCACHE']) except KeyError: pass try: g_lockfile = os.environ['WAFLOCK'] except KeyError: g_lockfile = '.lock-wscript' # =================================== # # HELPERS #g_col_names = ['BOLD', 'RED', 'REDP', 'GREEN', 'YELLOW', 'BLUE', 'CYAN', 'NORMAL'] #"color names" g_col_scheme = [1, 91, 33, 92, 93, 94, 96, 0] g_colors = { 'BOLD' :'\033[01;1m', 'RED' :'\033[01;91m', 'REDP' :'\033[01;33m', 'GREEN' :'\033[01;92m', 'YELLOW':'\033[00;33m', 'PINK' :'\033[00;35m', 'BLUE' :'\033[01;34m', 'CYAN' :'\033[01;36m', 'NORMAL':'\033[0m' } "colors used for printing messages" g_cursor_on ='\x1b[?25h' g_cursor_off='\x1b[?25l' def reset_colors(): global g_colors for k in g_colors.keys(): g_colors[k]='' g_cursor_on='' g_cursor_off='' if (sys.platform=='win32') or ('NOCOLOR' in 
os.environ) \ or (os.environ.get('TERM', 'dumb') in ['dumb', 'emacs']) \ or (not sys.stdout.isatty()): reset_colors() def pprint(col, str, label=''): try: mycol=g_colors[col] except KeyError: mycol='' print "%s%s%s %s" % (mycol, str, g_colors['NORMAL'], label) g_levels={ 'Action' : 'GREEN', 'Build' : 'CYAN', 'KDE' : 'REDP', 'Node' : 'GREEN', 'Object' : 'GREEN', 'Runner' : 'REDP', 'Task' : 'GREEN', 'Test' : 'GREEN', } g_zones = [] def set_trace(a, b, c): Utils.g_trace=a Utils.g_debug=b Utils.g_error=c def get_trace(): return (Utils.g_trace, Utils.g_debug, Utils.g_error) def niceprint(msg, type='', module=''): #if not module: # print '%s: %s'% (type, msg) # return if type=='ERROR': print '%s %s <%s> %s %s'% (type, g_colors['RED'], module, g_colors['NORMAL'], msg) return if type=='WARNING': print '%s %s <%s> %s %s'% (type, g_colors['RED'], module, g_colors['NORMAL'], msg) return if type=='DEBUG': print '%s %s <%s> %s %s'% (type, g_colors['CYAN'], module, g_colors['NORMAL'], msg) return if module in g_levels: print '%s %s <%s> %s %s'% (type, g_colors[g_levels[module]], module, g_colors['NORMAL'], msg) return print 'TRACE <%s> %s'% (module, msg) def __get_module(): try: return inspect.stack()[2][0].f_globals['__name__'] except (IndexError, KeyError): return "unknown" def debug(msg, zone=None): global g_zones, g_verbose if g_zones: if (not zone in g_zones) and (not '*' in g_zones): return elif not g_verbose>2: return module = __get_module() msg = time.strftime('%%X %s' % msg) niceprint(msg, 'DEBUG', module) def warning(msg, zone=0): module = __get_module() niceprint(msg, 'WARNING', module) def error(msg): if not Utils.g_error: return module = __get_module() niceprint(msg, 'ERROR', module) def fatal(msg, ret=1): module = __get_module() if g_verbose > 0: pprint('RED', '%s \n (error raised in module %s)' % (msg, module)) else: pprint('RED', '%s' % msg) if g_verbose > 1: import traceback traceback.print_stack() sys.exit(ret) def view_sig(s): "used for displaying signatures" 
if type(s) is types.StringType: n = base64.encodestring(s) return n[:-2] else: return str(s) def hash_sig(o1, o2): "hash two signatures" m = md5() m.update(o1) m.update(o2) return m.digest() def h_file(filename): f = file(filename,'rb') m = md5() readBytes = 100000 while (readBytes): readString = f.read(readBytes) m.update(readString) readBytes = len(readString) f.close() return m.digest() # Another possibility, faster but less accurate # based on the path, md5 hashing can be used for some files and timestamp for others #def h_file(filename): # st = os.stat(filename) # m = md5() # m.update(st.st_mtime) # m.update(st.st_size) # return m.digest() def h_string(str): m = md5() m.update(str) return m.digest() def h_list(lst): m = md5() m.update(str(lst)) return m.digest() _hash_blacklist_types = ( types.BuiltinFunctionType, types.ModuleType, types.FunctionType, types.ClassType, types.TypeType, types.NoneType, ) def hash_function_with_globals(prevhash, func): """ hash a function (object) and the global vars needed from outside ignore unhashable global variables (lists) prevhash -- previous hash value to be combined with this one; if there is no previous value, zero should be used here func -- a Python function object. """ assert type(func) is types.FunctionType for name, value in func.func_globals.iteritems(): if type(value) in _hash_blacklist_types: continue try: prevhash = hash( (prevhash, name, value) ) except TypeError: # raised for unhashable elements pass #else: # print "hashed: ", name, " => ", value, " => ", hash(value) return hash( (prevhash, inspect.getsource(func)) ) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Configure.py0000664000175000017500000002373110772251134022162 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! 
def find_file(filename, path_list):
	"""find a file in a list of paths
	@param filename: name of the file to search for
	@param path_list: list of directories to search, or a single
	whitespace-separated string of directories
	@return: the first directory containing filename, or '' if
	filename could not be found (note: the directory is returned,
	not the file path itself)
	"""
	# Python 2 only: types.StringType is the 2.x str type
	if type(path_list) is types.StringType:
		lst = path_list.split()
	else:
		lst = path_list
	for directory in lst:
		if os.path.exists( os.path.join(directory, filename) ):
			return directory
	return ''

def find_file_ext(filename, path_list):
	"""find a file in a list of paths using fnmatch (recursive search)
	@param filename: glob pattern of the file to search for
	@param path_list: list of directories to search, or a single
	whitespace-separated string of directories
	@return: the first directory (possibly a subdirectory of an entry
	in path_list, found via os.walk) containing a matching file, or ''
	"""
	import fnmatch
	if type(path_list) is types.StringType:
		lst = path_list.split()
	else:
		lst = path_list
	for directory in lst:
		# unlike find_file, this walks each tree recursively
		for path, subdirs, files in os.walk(directory):
			for name in files:
				if fnmatch.fnmatch(name, filename):
					return path
	return ''
class Configure(object):
	"""Driver object for 'waf configure'.

	Holds the named Environment instances (m_allenvs), the list of waf
	tools loaded during configuration, and an optional cross-run cache
	(m_cache_table) persisted with cPickle."""

	def __init__(self, env=None, blddir='', srcdir=''):
		# active environment (set below via setenv('default'))
		self.env = None
		self.m_envname = ''
		self.m_blddir = blddir
		self.m_srcdir = srcdir
		self.m_allenvs = {}
		self.defines = {}
		self.configheader = 'config.h'
		self.cwd = os.getcwd()
		self.tools = [] # tools loaded in the configuration, and that will be loaded when building
		self.setenv('default')
		self.m_cache_table = {}
		self.lastprog = ''
		# load the global configuration cache, unless disabled with --nocache
		if Params.g_cache_global and not Params.g_options.nocache:
			fic = os.path.join(Params.g_cache_global, Params.g_conf_name)
			try:
				file = open(fic, 'rb')
			except (OSError, IOError):
				pass
			else:
				try:
					self.m_cache_table = cPickle.load(file)
				finally:
					file.close()
		# saved trace/quiet settings for mute_logging/restore_logging
		self._a = 0
		self._b = 0
		self._c = 0
		self._quiet = 0
		# combined hash of the configure functions seen (for autoconfig)
		self.hash = 0
		self.files = []

	def errormsg(self, msg):
		Params.niceprint(msg, 'ERROR', 'Configuration')

	def fatal(self, msg):
		raise ConfigurationError(msg)

	def check_tool(self, input, tooldir=None):
		"load a waf tool (or a list of tools) and run its detect() function"
		tools = Utils.to_list(input)
		if tooldir: tooldir = Utils.to_list(tooldir)
		for tool in tools:
			try:
				file,name,desc = imp.find_module(tool, tooldir)
			except ImportError, ex:
				raise ConfigurationError("no tool named '%s' found (%s)" % (tool, str(ex)))
			module = imp.load_module(tool, file, name, desc)
			func = getattr(module, 'detect', None)
			if func: func(self)
			# remember the tool so the build phase can reload it
			self.tools.append({'tool':tool, 'tooldir':tooldir})

	def sub_config(self, dir):
		"executes the configure function of a wscript module in a subdirectory"
		current = self.cwd
		self.cwd = os.path.join(self.cwd, dir)
		cur = os.path.join(self.cwd, WSCRIPT_FILE)
		try:
			mod = Utils.load_module(cur)
		except IOError:
			fatal("the wscript file %s was not found." % cur)
		if not hasattr(mod, 'configure'):
			fatal('the module %s has no configure function; make sure such a function is defined' % cur)
		ret = mod.configure(self)
		if Params.g_autoconfig:
			# fold the configure function into the hash so config reruns
			# automatically when the script changes
			self.hash = Params.hash_function_with_globals(self.hash, mod.configure)
			self.files.append(os.path.abspath(cur))
		self.cwd = current
		return ret

	def store(self, file=''):
		"""save the config results into the cache file
		NOTE: the 'file' parameter is ignored and immediately rebound
		(it also shadows the builtin); kept for interface compatibility"""
		if not os.path.isdir(Params.g_cachedir):
			os.makedirs(Params.g_cachedir)
		file = open(os.path.join(Params.g_cachedir, 'build.config.py'), 'w')
		file.write('version = %s\n' % HEXVERSION)
		file.write('tools = %r\n' % self.tools)
		file.close()
		if not self.m_allenvs:
			fatal("nothing to store in Configure !")
		# each named environment is stored in its own cache file
		for key in self.m_allenvs:
			tmpenv = self.m_allenvs[key]
			tmpenv.store(os.path.join(Params.g_cachedir, key+CACHE_SUFFIX))

	def cleanup(self):
		"when there is a cache directory store the config results (shutdown)"
		if not Params.g_cache_global:
			return
		# not during the build
		if not os.path.isdir(Params.g_cache_global):
			os.makedirs(Params.g_cache_global)
		fic = os.path.join(Params.g_cache_global, Params.g_conf_name)
		file = open(fic, 'wb')
		try:
			cPickle.dump(self.m_cache_table, file)
		finally:
			file.close()

	def set_env_name(self, name, env):
		"add a new environment called name"
		self.m_allenvs[name] = env
		return env

	def retrieve(self, name, fromenv=None):
		"retrieve an environment called name, creating it on first use"
		try:
			env = self.m_allenvs[name]
		except KeyError:
			env = Environment.Environment()
			self.m_allenvs[name] = env
		else:
			if fromenv: warning("The environment %s may have been configured already" % name)
		return env

	def setenv(self, name):
		"enable the environment called name"
		self.env = self.retrieve(name)
		self.envname = name

	def add_os_flags(self, var, dest=None):
		"copy an os.environ variable into the active environment (silently skipped if unset)"
		if not dest: dest = var
		# do not use 'get' to make certain the variable is not defined
		try: self.env[dest] = os.environ[var]
		except KeyError: pass

	def check_message(self,type,msg,state,option=''):
		"print a checking message. This function is used by other checking functions"
		sr = 'Checking for ' + type + ' ' + msg
		global g_maxlen
		# grow the column width so subsequent messages stay aligned
		g_maxlen = max(g_maxlen, len(sr))
		print "%s :" % sr.ljust(g_maxlen),
		p = Params.pprint
		if state: p('GREEN', 'ok ' + option)
		else: p('YELLOW', 'not found')

	def check_message_custom(self,type,msg,custom,option=''):
		"""print a checking message with a custom result string.
		This function is used by other checking functions"""
		sr = 'Checking for ' + type + ' ' + msg
		global g_maxlen
		g_maxlen = max(g_maxlen, len(sr))
		print "%s :" % sr.ljust(g_maxlen),
		Params.pprint('CYAN', custom)

	def hook(self, func):
		"attach the function given as input as new method (on the class, so all instances see it)"
		setattr(self.__class__, func.__name__, func)

	def mute_logging(self):
		"mutes the output temporarily"
		if Params.g_options.verbose: return
		# store the settings
		(self._a, self._b, self._c) = Params.get_trace()
		self._quiet = Runner.g_quiet
		# then mute
		if not g_debug:
			Params.set_trace(0, 0, 0)
			Runner.g_quiet = 1

	def restore_logging(self):
		"see mute_logging"
		if Params.g_options.verbose: return
		# restore the settings
		if not g_debug:
			Params.set_trace(self._a, self._b, self._c)
			Runner.g_quiet = self._quiet

	def find_program(self, program_name, path_list=[], var=None):
		"wrapper provided for convenience (see find_program_impl)"
		ret = find_program_impl(self.env, program_name, path_list, var)
		self.check_message('program', program_name, ret, ret)
		return ret

	def check_pkg(self, modname, destvar='', vnum='', pkgpath='', pkgbin='', pkgvars=[], pkgdefs={}, mandatory=False):
		"""wrapper provided for convenience around the pkg-config configurator.
		NOTE(review): create_pkgconfig_configurator is not defined in this
		module — presumably attached by config_c (imported at module end);
		confirm before refactoring."""
		pkgconf = self.create_pkgconfig_configurator()
		if not destvar: destvar = modname.upper()
		pkgconf.uselib = destvar
		pkgconf.name = modname
		pkgconf.version = vnum
		if pkgpath: pkgconf.pkgpath = pkgpath
		pkgconf.binary = pkgbin
		pkgconf.variables = pkgvars
		pkgconf.defines = pkgdefs
		pkgconf.mandatory = mandatory
		return pkgconf.run()
"""
Fallback WatchMonitor
should work anywhere ..;-)
this do not depends on gamin or fam instead it polls for changes
it works at least under linux ... windows or other *nix are untested
"""

import os, time

support = True

class Fallback:
	"""Polling directory monitor: repeatedly stats every file in the
	watched directories and records created/changed/deleted events."""

	class Helper:
		"""Per-directory state: current/previous mtime maps plus the
		user callback and its opaque userdata."""
		def __init__( self, callBack, userdata ):
			self.currentFiles = {}
			self.oldFiles = {}
			self.__firstRun = True
			self.callBack = callBack
			self.userdata = userdata

		def isFirstRun( self ):
			# returns True exactly once, on the first traversal
			if self.__firstRun:
				self.__firstRun = False
				return True
			else:
				return False

	def __init__( self ):
		self.__dirs = {}
		# event lists for changed and deleted: {path: event-name}
		self.__changeLog = {}

	def __traversal( self, dirName ):
		"""Traversal function for directories
		Basic principle: all_files is a dictionary mapping paths to
		modification times.  We repeatedly crawl through the directory
		tree rooted at 'path', doing a stat() on each file and comparing
		the modification time.
		"""
		files = os.listdir( dirName )
		firstRun = self.__dirs[dirName].isFirstRun()
		for filename in files:
			path = os.path.join( dirName, filename )
			try:
				fileStat = os.stat( path )
			except os.error:
				# If a file has been deleted since the lsdir scanning the
				# directory and now, we'll get an os.error here.  Just
				# ignore it -- we'll report the deletion on the next pass
				# through the main loop.
				continue
			modifyTime = self.__dirs[dirName].oldFiles.get( path )
			if modifyTime is not None:
				# Record this file as having been seen
				del self.__dirs[dirName].oldFiles[path]
				# File's mtime has been changed since we last looked at it.
				if fileStat.st_mtime > modifyTime:
					self.__changeLog[path] = 'changed'
			else:
				if firstRun:
					self.__changeLog[path] = 'exists'
				else:
					# No recorded modification time, so it must be
					# a brand new file
					self.__changeLog[path] = 'created'
			# Record current mtime of file.
			self.__dirs[dirName].currentFiles[path] = fileStat.st_mtime

	def watch_directory( self, namePath, callBack, idxName ):
		"""Register a directory to watch; returns self so the caller can
		keep a handle."""
		self.__dirs[namePath] = self.Helper( callBack, idxName )
		return self

	def unwatch_directory( self, namePath ):
		if self.__dirs.get( namePath ):
			del self.__dirs[namePath]

	def event_pending( self ):
		"""Poll every watched directory; returns the number of queued
		events.  Files left in oldFiles after a traversal were deleted."""
		for dirName in self.__dirs.keys():
			self.__dirs[dirName].oldFiles = self.__dirs[dirName].currentFiles.copy()
			self.__dirs[dirName].currentFiles = {}
			self.__traversal( dirName )
			for deletedFile in self.__dirs[dirName].oldFiles.keys():
				self.__changeLog[deletedFile] = 'deleted'
				del self.__dirs[dirName].oldFiles[deletedFile]
		return len( self.__changeLog )

	def handle_events( self ):
		"""Dispatch one queued event to the directory's callback."""
		pathName = self.__changeLog.keys()[0]
		event = self.__changeLog[pathName]
		dirName = os.path.dirname( pathName )
		self.__dirs[dirName].callBack( pathName, event, self.__dirs[dirName].userdata )
		del self.__changeLog[pathName]

class FallbackAdaptor:
	"""Adaptor presenting the same interface as the gamin/fam adaptors,
	backed by the polling Fallback monitor."""

	def __init__( self, eventHandler ):
		self.__fallback = Fallback()
		self.__eventHandler = eventHandler # callBack function
		self.__watchHandler = {} # {name : famId}

	def __del__( self ):
		if self.__fallback:
			# snapshot the keys: stop_watch mutates __watchHandler while
			# we iterate (the original iterated the live dict)
			for handle in list( self.__watchHandler.keys() ):
				self.stop_watch( handle )
			self.__fallback = None

	def __check_fallback(self):
		# BUG FIX: raising a plain string is a TypeError since Python 2.6;
		# raise a real exception instead
		if self.__fallback is None:
			raise RuntimeError("fallback not initialized")

	def watch_directory( self, name, idxName ):
		"""Start watching a directory; raises RuntimeError if it is
		already being watched.  Returns the watch handle."""
		self.__check_fallback()
		if name in self.__watchHandler:
			raise RuntimeError("directory already watched")
		# set famId
		self.__watchHandler[name] = self.__fallback.watch_directory( name, self.__eventHandler, idxName )
		return(self.__watchHandler[name])

	def watch_file( self, name, idxName ):
		"""Watch a single file (implemented by watching its path like a
		directory, as in the original)."""
		self.__check_fallback()
		if name in self.__watchHandler:
			raise RuntimeError("file already watched")
		# set famId
		self.__watchHandler[name] = self.__fallback.watch_directory( name, self.__eventHandler, idxName )
		return(self.__watchHandler[name])

	def stop_watch( self, name ):
		self.__check_fallback()
		if name in self.__watchHandler:
			self.__fallback.unwatch_directory(name)
			del self.__watchHandler[name]
		return None

	def wait_for_event( self ):
		self.__check_fallback()
		# polling backend: just sleep between polls
		time.sleep( 1 )

	def event_pending( self ):
		self.__check_fallback()
		return self.__fallback.event_pending()

	def handle_events( self ):
		self.__check_fallback()
		self.__fallback.handle_events()
# module-level trace/debug flags
g_trace = 0
g_debug = 0
g_error = 0

# rotating spinner state used by progress_line
g_ind_idx = 0
g_ind = ['\\', '|', '/', '-']
"the rotation thing"

def test_full():
	"""Probe whether the filesystem is writable by creating and removing
	a marker file; aborts with a fatal error on ENOSPC (disk full)."""
	try:
		f=open('.waf-full','w')
		f.write('test')
		f.close()
		os.unlink('.waf-full')
	except IOError, e:
		import errno
		if e.errno == errno.ENOSPC:
			Params.fatal('filesystem full', e.errno)
		else:
			Params.fatal(str(e), e.errno)

# TODO DEPRECATED: to be removed in waf 1.4
def waf_version(mini = "0.0.1", maxi = "100.0.0"):
	"""throws an exception if the waf version is wrong
	Compares dotted version strings component-wise against
	Params.g_version, only over the common prefix length."""
	min_lst = map(int, mini.split('.'))
	max_lst = map(int, maxi.split('.'))
	waf_lst = map(int, Params.g_version.split('.'))
	mm = min(len(min_lst), len(waf_lst))
	for (a, b) in zip(min_lst[:mm], waf_lst[:mm]):
		if a < b:
			break
		if a > b:
			Params.fatal("waf version should be at least %s (%s found)" % (mini, Params.g_version))
	mm = min(len(max_lst), len(waf_lst))
	for (a, b) in zip(max_lst[:mm], waf_lst[:mm]):
		if a > b:
			break
		if a < b:
			Params.fatal("waf version should be at most %s (%s found)" % (maxi, Params.g_version))

def reset():
	"""Reset the global build state (used between runs/tests)."""
	import Params, Object, Environment
	Params.g_build = None
	Object.g_allobjs = []
	Environment.g_cache_max = {}
	#Object.task_gen.mappings = {}
	#Object.task_gen.mapped = {}

def to_list(sth):
	"""Return sth as a list: lists pass through, strings are split on
	whitespace. Python 2 only (types.ListType)."""
	if type(sth) is types.ListType:
		return sth
	else:
		return sth.split()

def options(**kwargs):
	# placeholder so wscripts may define/call options() unconditionally
	pass

g_loaded_modules = {}
"index modules by absolute path"

g_module=None
"the main module is special"
def set_main_module(file_path):
	"Load custom options, if defined; stores the top wscript as g_module"
	global g_module
	g_module = load_module(file_path, 'wscript_main')
	# remark: to register the module globally, use the following:
	# sys.modules['wscript_main'] = g_module

def to_hashtable(s):
	"""Parse newline-separated 'key=value' text into a dict.

	BUG FIX: the original split on every '=', so a value containing '='
	was silently truncated, and a line with no '=' raised IndexError.
	Now splits only on the first '=' and skips malformed lines."""
	tbl = {}
	lst = s.split('\n')
	for line in lst:
		if not line: continue
		mems = line.split('=', 1)
		if len(mems) < 2: continue  # tolerate lines without '='
		tbl[mems[0]] = mems[1]
	return tbl

# terminal width detection: ioctl-based where termios is available,
# fixed fallback (55 columns) elsewhere (e.g. win32)
try:
	import struct, fcntl, termios
except ImportError:
	def get_term_cols():
		"fallback when the terminal size cannot be queried"
		return 55
else:
	def get_term_cols():
		"query the terminal width via TIOCGWINSZ"
		dummy_lines, cols = struct.unpack("HHHH", \
			fcntl.ioctl(sys.stdout.fileno(),termios.TIOCGWINSZ , \
				struct.pack("HHHH", 0, 0, 0, 0)))[:2]
		return cols

def progress_line(state, total, col1, col2):
	"""Compute the progress-bar line for task state/total.

	col1/col2 are terminal color escape strings; the elapsed time since
	the first call is cached on Params.g_build.ini."""
	n = len(str(total))
	global g_ind, g_ind_idx
	g_ind_idx += 1
	ind = g_ind[g_ind_idx % 4]
	# remember the start time on the build object (first call sets it)
	if hasattr(Params.g_build, 'ini'):
		ini = Params.g_build.ini
	else:
		ini = Params.g_build.ini = time.time()
	pc = (100.*state)/total
	eta = time.strftime('%H:%M:%S', time.gmtime(time.time() - ini))
	fs = "[%%%dd/%%%dd][%%s%%2d%%%%%%s][%s][" % (n, n, ind)
	left = fs % (state, total, col1, pc, col2)
	right = '][%s%s%s]' % (col1, eta, col2)
	# color escapes take no columns, so add their lengths back
	cols = get_term_cols() - len(left) - len(right) + 2*len(col1) + 2*len(col2)
	if cols < 7: cols = 7
	ratio = int((cols*state)/total) - 1
	bar = ('='*ratio+'>').ljust(cols)
	msg = Params.g_progress % (left, bar, right)
	return msg
def __split_dirs(path):
	"""Recursively break a path into its directory components."""
	head, tail = os.path.split(path)
	if not head:
		# nothing left above: single component
		return [tail]
	if head == path:
		# os.path.split made no progress (e.g. the root itself)
		return [head]
	if tail:
		return __split_dirs(head) + [tail]
	# trailing separator: recurse without emitting an empty component
	return __split_dirs(head)

# translation table built on first use by quote_define_name
_quote_define_name_translation = None
"lazily construct a translation table for mapping invalid characters to valid ones"

def quote_define_name(path):
	"Converts a string to a constant name, foo/zbr-xpto.h -> FOO_ZBR_XPTO_H"
	global _quote_define_name_translation
	if _quote_define_name_translation is None:
		# every byte except digits and uppercase letters maps to '_'
		keep = string.digits + string.uppercase
		bad = ''.join([chr(code) for code in xrange(256) if chr(code) not in keep])
		_quote_define_name_translation = string.maketrans(bad, '_' * len(bad))
	return string.translate(string.upper(path), _quote_define_name_translation)

def quote_whitespace(path):
	"""Wrap a path containing spaces in double quotes (collapsing any
	doubled quotes that result)."""
	quoted = path
	if path.strip().find(' ') > 0:
		quoted = '"%s"' % path
	return quoted.replace('""', '"')

def trimquotes(s):
	"""Strip one pair of surrounding single quotes after trimming
	trailing whitespace; empty/None input yields ''."""
	if not s:
		return ''
	text = s.rstrip()
	if text[0] == "'" and text[-1] == "'":
		return text[1:-1]
	return text
g_all_scanners={}
"all instances of scanners"

class scanner(object):
	"TODO: call this a dependency manager (not a scanner), as it does scan and compute the signatures"

	def __init__(self):
		# every scanner instance registers itself globally by class name
		global g_all_scanners
		g_all_scanners[self.__class__.__name__] = self
		self.vars = [] # additional vars to add in the scanning process

	# ======================================= #
	# interface definition

	# this method returns a tuple containing:
	# * a list of nodes corresponding to real files
	# * a list of names for files not found in path_lst
	# the input parameters may have more parameters that the ones used below
	def scan(self, tsk, node):
		"usually reimplemented"
		return ([], [])

	# scans a node, the task may have additional parameters such as include paths, etc
	def do_scan(self, tsk, node):
		"""more rarely reimplemented; delegates to scan() and stores the
		results on the build tree (m_depends_on / m_raw_deps)"""
		debug("do_scan(self, node, env, hashparams)", 'ccroot')
		variant = node.variant(tsk.env())
		if not node:
			error("BUG rescanning a null node")
			return
		# we delegate the work to "def scan(self, tsk, node)" to avoid duplicate code
		(nodes, names) = self.scan(tsk, node)
		if Params.g_verbose:
			if Params.g_zones:
				debug('scanner for %s returned %s %s' % (node.m_name, str(nodes), str(names)), 'deps')
		tree = Params.g_build
		tree.m_depends_on[variant][node] = nodes
		tree.m_raw_deps[variant][node] = names

	# compute the signature, recompute it if there is no match in the cache
	def get_signature(self, tsk):
		"""the signature obtained may not be the one if the files have
		changed, we do it in two steps:
		1. compute with the cached dependencies; if it matches the
		   previous signature the dependencies were unchanged
		2. otherwise rescan every input and recompute"""
		tree = Params.g_build
		env = tsk.env()
		# assumption: we assume that we can still get the old signature from the signature cache
		try:
			node = tsk.m_outputs[0]
			variant = node.variant(tsk.env())
			time = tree.m_tstamp_variants[variant][node]
			key = hash( (variant, node, time, self.__class__.__name__) )
			prev_sig = tree.get_sig_cache(key)[1]
		except KeyError:
			prev_sig = Params.sig_nil
		# we can compute and return the signature if
		# * the source files have not changed (rescan is 0)
		# * the computed signature has not changed
		sig = self.get_signature_queue(tsk)
		# if the previous signature is the same
		if sig == prev_sig:
			return sig
		# therefore some source or some header is dirty, rescan the source files
		for node in tsk.m_inputs:
			self.do_scan(tsk, node)
		# recompute the signature and return it
		sig = self.get_signature_queue(tsk)
		return sig

	# ======================================= #
	# protected methods - override if you know what you are doing

	def get_signature_queue(self, tsk):
		"""the basic scheme for computing signatures from .cpp and
		inferred .h files: breadth-first walk over the inputs and their
		recorded dependencies, feeding timestamps into one md5"""
		tree = Params.g_build
		rescan = 0
		seen = []
		queue = []+tsk.m_inputs
		m = md5()
		# additional variables to hash (command-line defines for example)
		env = tsk.env()
		for x in self.vars:
			m.update(str(env[x]))
		# add the hashes of all files entering into the dependency system
		while queue:
			node = queue.pop(0)
			if node in seen:
				continue
			seen.append(node)
			# TODO: look at the case of stale nodes and dependencies types
			variant = node.variant(env)
			try:
				queue += tree.m_depends_on[variant][node]
			except KeyError:
				pass
			try:
				m.update(tree.m_tstamp_variants[variant][node])
			except KeyError:
				# a dependency is missing from the tree: signature unknown
				return Params.sig_nil
		return m.digest()
g_actions={}
"global actions"

# Template substitution pattern for simple_action command lines.
# FIX: the extraction of this file stripped the angle-bracket group
# names from the regex; they are restored here from the repl() function
# below, which reads groups 'dollar', 'subst', 'var' and 'code'.
reg_act = re.compile(r"(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})", re.M)

class Action(object):
	"Base class for all Actions, an action takes a task and produces its outputs"
	def __init__(self, name, vars=[], func=None, prio=100, color='GREEN'):
		"""If the action is simple, func is not defined, else a function
		can be attached and will be launched instead of running the string
		generated by 'setstr' see Runner for when this is used - a
		parameter is given, it is the task. Each action must have a name."""
		self.m_name = name
		# variables triggering a rebuild
		self.m_vars = vars
		self.m_function_to_run = func
		self.m_color = color
		self.prio = prio
		# register globally; redefining an action overrides the old one
		global g_actions
		if name in g_actions:
			debug('overriding action: %s' % name, 'action')
		g_actions[name] = self
		debug("action added: %s" % name, 'action')

	def __str__(self):
		return self.m_name

	def get_str(self, task):
		"string to display to the user"
		env = task.env()
		src_str = " ".join([a.nice_path(env) for a in task.m_inputs])
		tgt_str = " ".join([a.nice_path(env) for a in task.m_outputs])
		return "* %s : %s -> %s" % (self.m_name, src_str, tgt_str)

	def run(self, task):
		"run the compilation"
		f = self.m_function_to_run
		if not f:
			fatal("Action %s has no function!" % self.m_name)
		return f(task)

def funex(c):
	"""Execute generated source and return the function 'f' it defines.
	NOTE(review): relies on Python 2 semantics where exec inside a
	function injects 'f' into the local namespace; this pattern does not
	work under Python 3."""
	exec(c)
	return f

def simple_action(name, line, color='GREEN', vars=[], prio=100):
	"""Compiles a string (once) into an Action instance, eg:
	simple_action('c++', '${CXX} -o ${TGT[0]} ${SRC} -I ${SRC[0].m_parent.bldpath()}')

	The env variables (CXX, ..) on the task must not hold dicts (order)
	The reserved keywords TGT and SRC represent the task input and output nodes
	"""
	extr = []
	def repl(match):
		# substitute $$ -> literal $, ${VAR...} -> '%s' placeholder
		g = match.group
		if g('dollar'):
			return "$"
		elif g('subst'):
			extr.append((g('var'), g('code'))); return "%s"
		return None
	line = reg_act.sub(repl, line)
	parm = []
	dvars = []
	app = parm.append
	# build python expressions producing each placeholder's value
	for (var, meth) in extr:
		if var == 'SRC':
			if meth: app('task.m_inputs%s' % meth)
			else: app('" ".join([a.srcpath(env) for a in task.m_inputs])')
		elif var == 'TGT':
			if meth: app('task.m_outputs%s' % meth)
			else: app('" ".join([a.bldpath(env) for a in task.m_outputs])')
		else:
			# plain env variable: also becomes a rebuild trigger
			if not var in dvars: dvars.append(var)
			app("p('%s')" % var)
	if parm: parm = "%% (%s) " % (',\n\t\t'.join(parm))
	else: parm = ''
	# generate the function body and compile it via funex
	c = '''
def f(task):
	env = task.env()
	p = env.get_flat
	try: cmd = "%s" %s
	except Exception: task.debug(); raise
	return Runner.exec_command(cmd)
''' % (line, parm)
	debug(c, 'action')
	act = Action(name, prio=prio, color=color)
	act.m_function_to_run = funex(c)
	act.m_vars = vars or dvars
	return act
* Main commands: configure build install clean dist distclean uninstall distcheck * Example: ./waf build -j4""", version = 'waf %s' % Params.g_version) p=parser.add_option p('-j', '--jobs', type = 'int', default = 1, help = 'specify the number of parallel jobs [Default: 1]', dest = 'jobs') p('', '--daemon', action = 'store_true', default = False, help = 'run as a daemon [Default: False]', dest = 'daemon') p('-f', '--force', action = 'store_true', default = False, help = 'force the files installation', dest = 'force') p('-k', '--keep', action = 'store_true', default = False, help = 'keep running happily on independant task groups', dest = 'keep') p('-p', '--progress', action = 'count', default = 0, help = '-p: progress bar; -pp: ide output', dest = 'progress_bar') p('-v', '--verbose', action = 'count', default = 0, help = 'show verbose output [Default: False]', dest = 'verbose') p('--prefix', help = "installation prefix [Default: '%s']" % default_prefix, default = default_prefix, dest = 'prefix') p('--destdir', help = "installation root [Default: '%s']" % default_destdir, default = default_destdir, dest = 'destdir') p('--nocache', action = 'store_true', default = False, help = 'compile everything, even if WAFCACHE is set', dest = 'nocache') if 'configure' in sys.argv: p('-b', '--blddir', action = 'store', default = '', help = 'build dir for the project (configuration)', dest = 'blddir') p('-s', '--srcdir', action = 'store', default = '', help = 'src dir for the project (configuration)', dest = 'srcdir') p('--zones', action = 'store', default = '', help = 'debugging zones', dest = 'zones') p('--targets', action = 'store', default = '', help = 'compile the targets given only [targets in CSV format, e.g. 
def parse_args_impl(parser, _args=None):
	"""Parse the command line and populate the global command/option
	state on Params (g_options, g_commands, g_verbose, g_zones, ...)."""
	(Params.g_options, args) = parser.parse_args(args=_args)
	#print Params.g_options, " ", args

	# By default, 'waf' is equivalent to 'waf build'
	lst='dist configure clean distclean build install uninstall check distcheck'.split()
	Params.g_commands = {}
	for var in lst:
		Params.g_commands[var] = 0
	if len(args) == 0:
		Params.g_commands['build'] = 1

	# Parse the command arguments
	for arg in args:
		arg = arg.strip()
		if arg in lst:
			Params.g_commands[arg]=True
		else:
			print 'Error: Invalid command specified ',arg
			parser.print_help()
			sys.exit(1)
	# 'check' implies 'build'
	if Params.g_commands['check']:
		Params.g_commands['build'] = True

	if Params.g_commands['install'] or Params.g_commands['uninstall']:
		Params.g_install = 1

	# TODO -k => -j0
	if Params.g_options.keep: Params.g_options.jobs = 1

	Params.g_verbose = Params.g_options.verbose
	Params.g_zones = Params.g_options.zones.split(',')
	if Params.g_verbose>1: Params.set_trace(1,1,1)
	else: Params.set_trace(0,0,1)

class Handler(object):
	"loads wscript modules in folders for adding options"

	def __init__(self):
		self.parser = create_parser()
		self.cwd = os.getcwd()
		# the instance registers itself as the module-global handler
		global g_parser
		g_parser = self

	def add_option(self, *kw, **kwargs):
		self.parser.add_option(*kw, **kwargs)

	def add_option_group(self, *args, **kwargs):
		return self.parser.add_option_group(*args, **kwargs)

	def get_option_group(self, opt_str):
		return self.parser.get_option_group(opt_str)

	def sub_options(self, dir, option_group=None):
		"""set options defined by wscripts:
		- run by Scripting to set the options defined by main wscript.
		- run by wscripts to set options in sub directories."""
		try:
			current = self.cwd
			self.cwd = os.path.join(self.cwd, dir)
			cur = os.path.join(self.cwd, WSCRIPT_FILE)
			debug("cur is "+str(cur), 'options')
			try:
				mod = Utils.load_module(cur)
			except AttributeError:
				msg = "no module was found for wscript (sub_options)\n[%s]:\n * make sure such a function is defined \n * run configure from the root of the project"
				fatal(msg % self.cwd)
			try:
				fun = mod.set_options
			except AttributeError:
				msg = "no set_options function was found in wscript\n[%s]:\n * make sure such a function is defined \n * run configure from the root of the project"
				fatal(msg % self.cwd)
			else:
				fun(option_group or self)
		finally:
			# always restore the working directory
			self.cwd = current

	def tool_options(self, tool, tooldir=None, option_group=None):
		"""load a waf tool (or list of tools) and run its set_options
		function, if any"""
		# recurse over lists of tools (Python 2 only: types.ListType)
		if type(tool) is types.ListType:
			for i in tool: self.tool_options(i, tooldir, option_group)
			return

		if not tooldir: tooldir = Params.g_tooldir
		tooldir = Utils.to_list(tooldir)
		try:
			file,name,desc = imp.find_module(tool, tooldir)
		except ImportError:
			fatal("no tool named '%s' found" % tool)
		module = imp.load_module(tool,file,name,desc)
		try:
			fun = module.set_options
		except AttributeError:
			warning("tool %s has no function set_options" % tool)
		else:
			fun(option_group or self)

	def parse_args(self, args=None):
		parse_args_impl(self.parser, args)

g_parser = None
"Last Handler instance in use"
SAVED_ATTRS = 'm_root m_srcnode m_bldnode m_tstamp_variants m_depends_on m_raw_deps m_sig_cache'.split()
"Build class members to save"

g_modcache = {}
"Cache for the tools (modules), re-importing raises errors"

class BuildError(Exception):
	"""Raised when the build fails; carries the build object and the
	list of failed/missing tasks for error reporting."""
	def __init__(self, b=None, t=None):
		# FIX: mutable default argument replaced by None sentinel
		self.bld = b
		self.tasks = t or []
		self.ret = 1

	def get_message(self):
		"build a human-readable multi-line failure summary"
		lst = ['Build failed']
		for tsk in self.tasks:
			if tsk.m_hasrun == Runner.crashed:
				try:
					lst.append(" -> task failed (err #%d): %s" % (tsk.err_code, str(tsk.m_outputs)))
				except AttributeError:
					# BUG FIX: the original was '" -> task failed:" % str(...)'
					# which raises TypeError (no conversion specifier)
					lst.append(" -> task failed: %s" % str(tsk.m_outputs))
			elif tsk.m_hasrun == Runner.missing:
				lst.append(" -> missing files: %s" % str(tsk.m_outputs))
		return '\n'.join(lst)

class BuildDTO(object):
	"""holds the data to store using cPickle: a plain container mirroring
	the SAVED_ATTRS members of a Build object"""
	def __init__(self):
		pass
	def init(self, bdobj):
		"copy the persistent attributes from a Build object"
		global SAVED_ATTRS
		for a in SAVED_ATTRS:
			setattr(self, a, getattr(bdobj, a))
	def update_build(self, bdobj):
		"copy the persistent attributes back onto a Build object"
		global SAVED_ATTRS
		for a in SAVED_ATTRS:
			setattr(bdobj, a, getattr(self, a))
======================================= # # cache variables # local cache for absolute paths - m_abspath_cache[variant][node] self.m_abspath_cache = {} # local cache for relative paths # two nodes - hashtable of hashtables - g_relpath_cache[child][parent]) self._relpath_cache = {} # list of folders that are already scanned # so that we do not need to stat them one more time self.m_scanned_folders = [] # file contents self._cache_node_content = {} # list of targets to uninstall for removing the empty folders after uninstalling self.m_uninstall = [] # ======================================= # # tasks and objects # build dir variants (release, debug, ..) for name in ['default', 0]: for v in 'm_tstamp_variants m_depends_on m_sig_cache m_raw_deps m_abspath_cache'.split(): var = getattr(self, v) if not name in var: var[name] = {} # TODO used by xmlwaf self.pushed = [] def _init_data(self): debug("init data called", 'build') # filesystem root - root name is Params.g_rootname self.m_root = Node.Node('', None) # source directory self.m_srcnode = None # build directory self.m_bldnode = None # TODO: this code does not look too good # nodes signatures: self.m_tstamp_variants[variant_name][node] = signature_value self.m_tstamp_variants = {} # one node has nodes it depends on, tasks cannot be stored # self.m_depends_on[variant][node] = [node1, node2, ..] 
self.m_depends_on = {} # results of a scan: self.m_raw_deps[variant][node] = [filename1, filename2, filename3] # for example, find headers in c files self.m_raw_deps = {} self.m_sig_cache = {} self.task_manager = Task.TaskManager() # load existing data structures from the disk (stored using self._store()) def _load(self): cachedir = Params.g_cachedir code = '' try: file = open(os.path.join(cachedir, 'build.config.py'), 'r') code = file.read() file.close() except (IOError, OSError): # TODO load the pickled file and the environments better pass else: re_imp = re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$', re.M) for m in re_imp.finditer(code): g = m.group if g(2) == 'version': if eval(g(3)) < HEXVERSION: Params.fatal('Version mismatch! reconfigure the project') elif g(2) == 'tools': lst = eval(g(3)) for t in lst: self.setup(**t) try: file = open(os.path.join(self.m_bdir, DBFILE), 'rb') dto = cPickle.load(file) dto.update_build(self) file.close() except IOError: debug("resetting the build object (dto failed)", 'build') self._init_data() if Params.g_verbose>2: self.dump() # store the data structures on disk, retrieve with self._load() def _store(self): file = open(os.path.join(self.m_bdir, DBFILE), 'wb') dto = BuildDTO() dto.init(self) cPickle.dump(dto, file, -1) # remove the '-1' for unoptimized version file.close() # ======================================= # def save(self): self._store() def clean(self): debug("clean called", 'build') def clean_rec(node): for x in node.m_build_lookup: nd = node.m_build_lookup[x] for env in self.m_allenvs.values(): pt = nd.abspath(env) # do not remove config files if pt in env['waf_config_files']: continue try: os.remove(pt) except OSError: pass for x in node.m_dirs_lookup: nd = node.m_dirs_lookup[x] clean_rec(nd) clean_rec(self.m_srcnode) def compile(self): debug("compile called", 'build') os.chdir(self.m_bdir) Object.flush() if Params.g_verbose>2: self.dump() self.task_manager.flush() if Params.g_options.jobs <= 1: executor = 
Runner.Serial(self) else: executor = Runner.Parallel(self, Params.g_options.jobs) # TODO clean self.generator = executor def dw(): if Params.g_options.progress_bar: sys.stdout.write(Params.g_cursor_on) debug('executor starting', 'build') try: if Params.g_options.progress_bar: sys.stdout.write(Params.g_cursor_off) ret = executor.start() except KeyboardInterrupt, e: dw() os.chdir(self.m_srcnode.abspath()) self._store() Params.pprint('RED', 'Build interrupted') if Params.g_verbose > 1: raise else: sys.exit(68) except Exception, e: dw() # do not store anything, for something bad happened raise else: dw() self._store() if ret: Utils.test_full() raise BuildError(self, self.task_manager.tasks_done) if Params.g_verbose>2: self.dump() os.chdir(self.m_srcnode.abspath()) def install(self): "this function is called for both install and uninstall" debug('install called', 'build') Object.flush() for obj in Object.g_allobjs: if obj.m_posted: obj.install() # remove empty folders after uninstalling if Params.g_commands['uninstall']: lst = [] for x in self.m_uninstall: dir = os.path.dirname(x) if not dir in lst: lst.append(dir) lst.sort() lst.reverse() nlst = [] for y in lst: x = y while len(x) > 4: if not x in nlst: nlst.append(x) x = os.path.dirname(x) nlst.sort() nlst.reverse() for x in nlst: try: os.rmdir(x) except OSError: pass def add_subdirs(self, dirs): for dir in Utils.to_list(dirs): if dir: Scripting.add_subdir(dir, self) def create_obj(self, objname, *k, **kw): try: return Object.task_gen.classes[objname](*k, **kw) except KeyError: raise KeyError("'%s' is not a valid build tool -> %s" % (objname, [x for x in Object.task_gen.classes])) def load_envs(self): cachedir = Params.g_cachedir try: lst = os.listdir(cachedir) except OSError, e: if e.errno == errno.ENOENT: fatal('The project was not configured: run "waf configure" first!') else: raise if not lst: fatal('The cache directory is empty: reconfigure the project') for file in lst: if file.endswith(CACHE_SUFFIX): env = 
Environment.Environment() env.load(os.path.join(cachedir, file)) name = file.split('.')[0] self.m_allenvs[name] = env self._initialize_variants() for env in self.m_allenvs.values(): for f in env['dep_files']: newnode = self.m_srcnode.find_build(f, create=1) try: hash = Params.h_file(newnode.abspath(env)) except (IOError, AttributeError): error("cannot find "+f) hash = Params.sig_nil self.m_tstamp_variants[env.variant()][newnode] = hash def setup(self, tool, tooldir=None): "setup tools for build process" if type(tool) is types.ListType: for i in tool: self.setup(i, tooldir) return if not tooldir: tooldir = Params.g_tooldir file = None key = str((tool, tooldir)) module = g_modcache.get(key, None) if not module: file,name,desc = imp.find_module(tool, tooldir) module = imp.load_module(tool,file,name,desc) g_modcache[key] = module if hasattr(module, "setup"): module.setup(self) if file: file.close() def _initialize_variants(self): debug("init variants", 'build') lstvariants = [] for env in self.m_allenvs.values(): if not env.variant() in lstvariants: lstvariants.append(env.variant()) self._variants = lstvariants debug("list of variants is "+str(lstvariants), 'build') for name in lstvariants+[0]: for v in 'm_tstamp_variants m_depends_on m_raw_deps m_abspath_cache'.split(): var = getattr(self, v) if not name in var: var[name] = {} # ======================================= # # node and folder handling # this should be the main entry point def load_dirs(self, srcdir, blddir, isconfigure=None): "this functions should be the start of everything" # there is no reason to bypass this check try: if srcdir == blddir or os.path.abspath(srcdir) == os.path.abspath(blddir): fatal("build dir must be different from srcdir ->"+str(srcdir)+" ->"+str(blddir)) except OSError: pass # set the source directory if not os.path.isabs(srcdir): srcdir = os.path.join(os.path.abspath('.'),srcdir) # set the build directory it is a path, not a node (either absolute or relative) if not 
os.path.isabs(blddir): self.m_bdir = os.path.abspath(blddir) else: self.m_bdir = blddir if not isconfigure: self._load() if self.m_srcnode: self.m_curdirnode = self.m_srcnode return self.m_srcnode = self.ensure_dir_node_from_path(srcdir) debug("srcnode is %s and srcdir %s" % (str(self.m_srcnode), srcdir), 'build') self.m_curdirnode = self.m_srcnode self.m_bldnode = self.ensure_dir_node_from_path(self.m_bdir) # create this build dir if necessary try: os.makedirs(blddir) except OSError: pass self._initialize_variants() def ensure_dir_node_from_path(self, abspath): "return a node corresponding to an absolute path, creates nodes if necessary" debug('ensure_dir_node_from_path %s' % (abspath), 'build') plst = Utils.split_path(abspath) curnode = self.m_root # root of the tree for dirname in plst: if not dirname: continue if dirname == '.': continue found = curnode.get_dir(dirname, None) if not found: found = Node.Node(dirname, curnode) curnode.append_dir(found) curnode = found return curnode def rescan(self, src_dir_node): """ first list the files in the src dir and update the nodes - for each variant build dir (multiple build dirs): - list the files in the build dir, update the nodes this makes (n bdirs)+srdir to scan (at least 2 folders) so we might want to do it in parallel in some future """ # FIXME use sets with intersection and union # do not rescan over and over again if src_dir_node.hash_value in self.m_scanned_folders: return # do not rescan the nodes above srcnode if src_dir_node.height() < self.m_srcnode.height(): return #debug("rescanning "+str(src_dir_node), 'build') # TODO undocumented hook if hasattr(self, 'repository'): self.repository(src_dir_node) # list the files in the src directory, adding the signatures files = self.scan_src_path(src_dir_node, src_dir_node.abspath(), src_dir_node.files()) #debug("files found in folder are "+str(files), 'build') src_dir_node.m_files_lookup = {} for i in files: src_dir_node.m_files_lookup[i.m_name] = i # list the files 
in the build dirs # remove the existing timestamps if the build files are removed # first obtain the differences between srcnode and src_dir_node #lst = self.m_srcnode.difflst(src_dir_node) h1 = self.m_srcnode.height() h2 = src_dir_node.height() lst = [] child = src_dir_node while h2 > h1: lst.append(child.m_name) child = child.m_parent h2 -= 1 lst.reverse() for variant in self._variants: sub_path = os.path.join(self.m_bldnode.abspath(), variant , *lst) try: files = self.scan_path(src_dir_node, sub_path, src_dir_node.m_build_lookup.values(), variant) src_dir_node.m_build_lookup = {} for i in files: src_dir_node.m_build_lookup[i.m_name] = i except OSError: #debug("osError on " + sub_path, 'build') # listdir failed, remove all sigs of nodes dict = self.m_tstamp_variants[variant] for node in src_dir_node.m_build_lookup.values(): if node in dict: dict.__delitem__(node) os.makedirs(sub_path) src_dir_node.m_build_lookup = {} self.m_scanned_folders.append(src_dir_node.hash_value) # ======================================= # def scan_src_path(self, i_parent_node, i_path, i_existing_nodes): try: # read the dir contents, ignore the folders in it l_names_read = os.listdir(i_path) except OSError: warning("OSError exception in scan_src_path() i_path=%s" % str(i_path) ) return None debug("folder contents "+str(l_names_read), 'build') # there are two ways to obtain the partitions: # 1 run the comparisons two times (not very smart) # 2 reduce the sizes of the list while looping l_names = l_names_read l_nodes = i_existing_nodes l_kept = [] for node in l_nodes: i = 0 name = node.m_name l_len = len(l_names) while i < l_len: if l_names[i] == name: l_kept.append(node) break i += 1 if i < l_len: del l_names[i] # Now: # l_names contains the new nodes (or files) # l_kept contains only nodes that actually exist on the filesystem for node in l_kept: try: # update the time stamp self.m_tstamp_variants[0][node] = Params.h_file(node.abspath()) except IOError: fatal("a file is readonly or has 
become a dir "+node.abspath()) debug("new files found "+str(l_names), 'build') l_path = i_path + os.sep for name in l_names: try: # throws IOError if not a file or if not readable st = Params.h_file(l_path + name) except IOError: continue l_child = Node.Node(name, i_parent_node) self.m_tstamp_variants[0][l_child] = st l_kept.append(l_child) return l_kept def scan_path(self, i_parent_node, i_path, i_existing_nodes, i_variant): """in this function we do not add timestamps but we remove them when the files no longer exist (file removed in the build dir)""" # read the dir contents, ignore the folders in it l_names_read = os.listdir(i_path) # there are two ways to obtain the partitions: # 1 run the comparisons two times (not very smart) # 2 reduce the sizes of the list while looping l_names = l_names_read l_nodes = i_existing_nodes l_rm = [] for node in l_nodes: i = 0 name = node.m_name l_len = len(l_names) while i < l_len: if l_names[i] == name: break i += 1 if i < l_len: del l_names[i] else: l_rm.append(node) # remove the stamps of the nodes that no longer exist in the build dir for node in l_rm: #print "\nremoving the timestamp of ", node, node.m_name #print node.m_parent.m_build #print l_names_read #print l_names if node in self.m_tstamp_variants[i_variant]: self.m_tstamp_variants[i_variant].__delitem__(node) return l_nodes def dump(self): "for debugging" def printspaces(count): if count > 0: return printspaces(count - 1) + "-" return "" def recu(node, count): accu = printspaces(count) accu += "> "+node.m_name+" (d)\n" for child in node.files(): accu += printspaces(count) accu += '-> '+child.m_name+' ' for variant in self.m_tstamp_variants: #print "variant %s"%variant var = self.m_tstamp_variants[variant] #print var if child in var: accu+=' [%s,%s] ' % (str(variant), Params.view_sig(var[child])) accu+='\n' #accu+= ' '+str(child.m_tstamp)+'\n' # TODO #if node.files()[file].m_newstamp != node.files()[file].m_oldstamp: accu += "\t\t\t(modified)" #accu+= 
node.files()[file].m_newstamp + "< >" + node.files()[file].m_oldstamp + "\n" for child in node.m_build_lookup.values(): accu+= printspaces(count) accu+= '-> '+child.m_name+' (b) ' for variant in self.m_tstamp_variants: #print "variant %s"%variant var = self.m_tstamp_variants[variant] #print var if child in var: accu+=' [%s,%s] ' % (str(variant), Params.view_sig(var[child])) accu+='\n' #accu+= ' '+str(child.m_tstamp)+'\n' # TODO #if node.files()[file].m_newstamp != node.files()[file].m_oldstamp: accu += "\t\t\t(modified)" #accu+= node.files()[file].m_newstamp + "< >" + node.files()[file].m_oldstamp + "\n" for dir in node.dirs(): accu += recu(dir, count+1) return accu Params.pprint('CYAN', recu(self.m_root, 0) ) Params.pprint('CYAN', 'size is '+str(self.m_root.size_subtree())) #keys = self.m_name2nodes.keys() #for k in keys: # print k, '\t\t', self.m_name2nodes[k] def pushdir(self, dir): node = self.m_curdirnode.ensure_node_from_lst(Utils.split_path(dir)) self.pushed = [self.m_curdirnode]+self.pushed self.m_curdirnode = node def popdir(self): self.m_curdirnode = self.pushed.pop(0) def env_of_name(self, name): if not name: error('env_of_name called with no name!') return None try: return self.m_allenvs[name] except KeyError: error('no such environment'+name) return None def env(self, name='default'): return self.env_of_name(name) def add_group(self, name=''): Object.flush(all=0) self.task_manager.add_group(name) def add_manual_dependency(self, path, value): h = getattr(self, 'deps_man', {}) node = self.m_curdirnode.find_source(path) if not node: node = self.m_curdirnode.find_build(path, create=1) h[node] = value self.deps_man = h def set_sig_cache(self, key, val): self.m_sig_cache[key] = val def get_sig_cache(self, key): try: return self.m_sig_cache[key] except KeyError: s = Params.sig_nil return [s, s, s, s, s] def launch_node(self): try: return self._launch_node except AttributeError: self._launch_node = self.m_root.find_dir(Params.g_cwd_launch) return 
self._launch_node lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Runner.py0000664000175000017500000002403210771525014021505 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2005 (ita) "Execute the tasks" import sys, random, time, threading, Queue, traceback import Params, Utils import pproc as subprocess from Params import debug, error g_quiet = 0 "do not output anything" missing = 1 crashed = 2 skipped = 8 success = 9 def printout(s): sys.stdout.write(s); sys.stdout.flush() def progress_line(state, total, col1, task, col2): "do not print anything if there is nothing to display" if Params.g_options.progress_bar == 1: return Utils.progress_line(state, total, col1, col2) if Params.g_options.progress_bar == 2: try: ini = Params.g_build.ini except AttributeError: ini = Params.g_build.ini = time.time() ela = time.strftime('%H:%M:%S', time.gmtime(time.time() - ini)) ins = ','.join([n.m_name for n in task.m_inputs]) outs = ','.join([n.m_name for n in task.m_outputs]) return '|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n' % (total, state, ins, outs, ela) n = len(str(total)) fs = "[%%%dd/%%%dd] %%s%%s%%s\n" % (n, n) return fs % (state, total, col1, task.get_display(), col2) def process_cmd_output(cmd_stdout, cmd_stderr): stdout_eof = stderr_eof = 0 while not (stdout_eof and stderr_eof): if not stdout_eof: s = cmd_stdout.read() if not s: stdout_eof = 1 elif not g_quiet: printout(s) if not stderr_eof: s = cmd_stderr.read() if not s: stderr_eof = 1 elif not g_quiet: sys.stderr.write('\n') sys.stderr.write(s) #time.sleep(0.1) def exec_command_normal(s): "run commands in a portable way the subprocess module backported from python 2.4 and should work on python >= 2.2" debug("system command -> "+ s, 'runner') if Params.g_verbose: print s # encase the command in double-quotes in windows if sys.platform == 'win32' and not s.startswith('""'): s = '"%s"' % s proc = subprocess.Popen(s, shell=1, 
stdout=subprocess.PIPE, stderr=subprocess.PIPE) process_cmd_output(proc.stdout, proc.stderr) stat = proc.wait() if stat & 0xff: return stat | 0x80 return stat >> 8 def exec_command_interact(s): "this one is for the latex output, where we cannot capture the output while the process waits for stdin" debug("system command (interact) -> "+ s, 'runner') if Params.g_verbose: print s # encase the command in double-quotes in windows if sys.platform == 'win32' and not s.startswith('""'): s = '"%s"' % s proc = subprocess.Popen(s, shell=1) stat = proc.wait() if stat & 0xff: return stat | 0x80 return stat >> 8 exec_command = exec_command_interact # python bug on stdout overload def set_exec(mode): global exec_command if mode == 'normal': exec_command = exec_command_normal elif mode == 'noredir': exec_command = exec_command_interact else: error('set_runner_mode') class Serial(object): def __init__(self, bld): self.error = 0 self.manager = bld.task_manager self.curgroup = 0 self.curprio = -1 self.outstanding = [] # list of tasks in the current priority self.priolst = [] # progress bar self.total = self.manager.total() self.processed = 0 self.switchflag = 1 # postpone # self.manager.debug() # warning, this one is recursive .. 
def get_next(self): if self.outstanding: t = self.outstanding.pop(0) self.processed += 1 return t # handle case where only one wscript exist # that only install files if not self.manager.groups: return None # stop condition if self.curgroup >= len(self.manager.groups): return None # increase the priority value self.curprio += 1 # there is no current list group = self.manager.groups[self.curgroup] if self.curprio >= len(group.prio.keys()): self.curprio = -1 self.curgroup += 1 return self.get_next() # sort keys if necessary if self.curprio == 0: self.priolst = group.prio.keys() self.priolst.sort() # now fill outstanding id = self.priolst[self.curprio] self.outstanding = group.prio[id] if Params.g_verbose: debug("Preparing to run prio %i tasks: [\n%s\n\t]" % (id, ',\n'.join(["\t#%i: %s" % (tsk.m_idx, repr(tsk).strip()) for tsk in self.outstanding])), 'runner') return self.get_next() def progress(self): return (self.processed, self.total) def postpone(self, tsk): self.processed -= 1 # shuffle the list - why it does work is left as an exercise for the reader self.switchflag *= -1 if self.switchflag>0: self.outstanding.insert(0, tsk) else: self.outstanding.append(tsk) # TODO FIXME def debug(self): debug("debugging a task: something went wrong:", 'runner') s = " ".join([str(t.m_idx) for t in self.manager]) debug(s, 'runner') # skip a group and report the failure def skip_group(self): self.curgroup += 1 self.curprio = -1 self.outstanding = [] try: self.manager.groups[self.curgroup].prio.sort() except KeyError: pass def start(self): global g_quiet debug("Serial start called", 'runner') #self.debug() while 1: # get next Task tsk = self.get_next() if tsk is None: break debug("retrieving #%i (%r)" % (tsk.m_idx, tsk), 'runner') # # ======================= #if tsk.m_hasrun: # error("task has already run! 
"+str(tsk.m_idx)) if not tsk.may_start(): debug("delaying #"+str(tsk.m_idx), 'runner') self.postpone(tsk) #self.debug() #tsk = None continue # # ======================= tsk.prepare() #tsk.debug() #debug("m_sig is "+str(tsk.m_sig), 'runner') #debug("obj output m_sig is "+str(tsk.m_outputs[0].get_sig()), 'runner') #continue if not tsk.must_run(): tsk.m_hasrun = skipped self.manager.add_finished(tsk) #debug("task is up-to_date "+str(tsk.m_idx), 'runner') continue debug("executing #"+str(tsk.m_idx), 'runner') # display the command that we are about to run if not g_quiet: (s, t) = self.progress() cl = Params.g_colors printout(progress_line(s, t, cl[tsk.color()], tsk, cl['NORMAL'])) # run the command ret = tsk.run() self.manager.add_finished(tsk) # non-zero means something went wrong if ret: self.error = 1 tsk.m_hasrun = crashed tsk.err_code = ret if Params.g_options.keep: continue else: return -1 try: tsk.update_stat() except OSError: traceback.print_stack() self.error = 1 tsk.m_hasrun = missing if Params.g_options.keep: continue else: return -1 else: tsk.m_hasrun = success if self.error: return -1 class TaskConsumer(threading.Thread): def __init__(self, i, m): threading.Thread.__init__(self) self.setDaemon(1) self.id = i self.master = m self.start() def run(self): do_stat = getattr(self, 'do_stat', None) m = self.master while 1: tsk = m.ready.get() if m.failed and not m.running: m.out.put(tsk) continue if do_stat: do_stat(1) printout(tsk.get_display()) ret = tsk.run() if do_stat: do_stat(-1) if ret: tsk.err_code = ret tsk.m_hasrun = crashed else: try: tsk.update_stat() except OSError: tsk.m_hasrun = missing else: tsk.m_hasrun = success if tsk.m_hasrun != success: # TODO for now, do no keep running in parallel and not Params.g_options.keep: m.failed = 1 m.out.put(tsk) class Parallel(object): """ The following is a small scheduler for making as many tasks available to the consumer threads It uses the serial shuffling system """ def __init__(self, bld, j=2): # number of 
consumers self.numjobs = j self.manager = bld.task_manager # progress bar self.total = self.manager.total() self.processed = 0 # tasks waiting to be processed - IMPORTANT self.outstanding = [] # tasks that are awaiting for another task to complete self.frozen = [] # tasks waiting to be run by the consumers self.ready = Queue.Queue(0) self.out = Queue.Queue(0) self.count = 0 # tasks not in the producer area self.failed = 0 # some task has failed self.running = 0 # keep running ? self.progress = 0 # progress indicator self.curgroup = 0 self.curprio = -1 self.priolst = [] def get_next_prio(self): # stop condition if self.curgroup >= len(self.manager.groups): return (None, None) # increase the priority value self.curprio += 1 # there is no current list group = self.manager.groups[self.curgroup] if self.curprio >= len(group.prio.keys()): self.curprio = -1 self.curgroup += 1 return self.get_next_prio() # sort keys if necessary if self.curprio == 0: self.priolst = group.prio.keys() self.priolst.sort() id = self.priolst[self.curprio] return (id, group.prio[id]) def start(self): for i in range(self.numjobs): TaskConsumer(i, self) # the current group #group = None def get_out(): self.manager.add_finished(self.out.get()) self.count -= 1 lastfailput = 0 # iterate over all tasks at most one time for each task run penalty = 0 currentprio = 0 #loop=0 while 1: #loop += 1 if self.failed and not self.running: while self.count > 0: get_out() if self.failed: return -1 if 1 == currentprio % 2: # allow only one process at a time in priority 'even' while self.count > 0: get_out() else: # not too many jobs in the queue while self.count > self.numjobs + 10: get_out() # empty the returned tasks as much as possible while not self.out.empty(): get_out() if not self.outstanding: if self.count > 0: get_out() self.outstanding = self.frozen self.frozen = [] if not self.outstanding: while self.count > 0: get_out() (currentprio, self.outstanding) = self.get_next_prio() #if self.outstanding: 
random.shuffle(self.outstanding) if currentprio is None: break # consider the next task tsk = self.outstanding.pop(0) if tsk.may_start(): tsk.prepare() self.progress += 1 if not tsk.must_run(): tsk.m_hasrun = skipped self.manager.add_finished(tsk) continue cl = Params.g_colors tsk.set_display(progress_line(self.progress, self.total, cl[tsk.color()], tsk, cl['NORMAL'])) self.count += 1 self.ready.put(tsk) else: if random.randint(0,1): self.frozen.insert(0, tsk) else: self.frozen.append(tsk) #print loop lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Object.py0000664000175000017500000003443610772264157021464 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 import sys if sys.hexversion < 0x020400f0: from sets import Set as set #! /usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2005-2008 (ita) """ The class task_gen encapsulates the creation of task objects (low-level code) The instances can have various parameters, but the creation of task nodes is delayed. To achieve this, various methods are called from the method "apply" The class task_gen contains lots of methods, and a configuration table: * the methods to call (self.meths) can be specified dynamically (removing, adding, ..) 
* the order of the methods (self.prec or by default task_gen.prec) is configurable * new methods can be inserted dynamically without pasting old code Additionally, task_gen provides the method apply_core * file extensions are mapped to methods: def meth(self, name_or_node) * if a mapping is not found in self.mappings, it is searched in task_gen.mappings * when called, the functions may modify self.allnodes to re-add source to process * the mappings can map an extension or a filename (see the code below) WARNING 1 subclasses must reimplement the clone method to avoid problems with 'deepcopy' WARNING 2 find a new name for this file (naming it 'Object' was never a good idea) """ import os, types, traceback, sys, copy import Params, Task, Common, Node, Utils, Action from Params import debug, error, fatal typos = { 'sources':'source', 'targets':'target', 'include':'includes', 'define':'defines', 'importpath':'importpaths', 'install_var':'inst_var', 'install_subdir':'inst_dir', } g_allobjs = [] "contains all objects, provided they are created (not in distclean or in dist)" #TODO part of the refactoring to eliminate the static stuff (Utils.reset) g_name_to_obj = {} def name_to_obj(name): global g_name_to_obj if not g_name_to_obj: for x in g_allobjs: if x.name: g_name_to_obj[x.name] = x elif not x.target in g_name_to_obj.keys(): g_name_to_obj[x.target] = x return g_name_to_obj.get(name, None) def flush(all=1): "object instances under the launch directory create the tasks now" global g_allobjs global g_name_to_obj # force the initialization of the mapping name->object in flush # name_to_obj can be used in userland scripts, in that case beware of incomplete mapping g_name_to_obj = {} name_to_obj(None) tree = Params.g_build debug("delayed operation Object.flush() called", 'object') # post only objects below a particular folder (recursive make behaviour) launch_dir_node = tree.m_root.find_dir(Params.g_cwd_launch) if launch_dir_node.is_child_of(tree.m_bldnode): launch_dir_node 
= tree.m_srcnode if not launch_dir_node.is_child_of(tree.m_srcnode): launch_dir_node = tree.m_srcnode if Params.g_options.compile_targets: debug('posting objects listed in compile_targets', 'object') # ensure the target names exist, fail before any post() targets_objects = {} for target_name in Params.g_options.compile_targets.split(','): # trim target_name (handle cases when the user added spaces to targets) target_name = target_name.strip() targets_objects[target_name] = name_to_obj(target_name) if all and not targets_objects[target_name]: fatal("target '%s' does not exist" % target_name) for target_obj in targets_objects.values(): if target_obj and not target_obj.m_posted: target_obj.post() else: debug('posting objects (normal)', 'object') for obj in g_allobjs: if launch_dir_node and not obj.path.is_child_of(launch_dir_node): continue if not obj.m_posted: obj.post() class register_obj(type): """no decorators for classes, so we use a metaclass we store into task_gen.classes the classes that inherit task_gen and whose names end in 'obj' """ def __init__(cls, name, bases, dict): super(register_obj, cls).__init__(name, bases, dict) name = cls.__name__ if name != 'task_gen' and not name.endswith('_abstract'): task_gen.classes[name.replace('_taskgen', '')] = cls class task_gen(object): """ Most methods are of the form 'def meth(self):' without any parameters there are many of them, and they do many different things: * task creation * task results installation * environment modification * attribute addition/removal The inheritance approach is complicated * mixing several languages at once * subclassing is needed even for small changes * inserting new methods is complicated This new class uses a configuration table: * adding new methods easily * obtaining the order in which to call the methods * postponing the method calls (post() -> apply) Additionally, a 'traits' static attribute is provided: * this list contains methods * the methods can remove or add methods from 
self.meths Example1: the attribute 'staticlib' is set on an instance a method set in the list of traits is executed when the instance is posted, it finds that flag and adds another method for execution Example2: a method set in the list of traits finds the msvc compiler (from self.env['MSVC']==1); more methods are added to self.meths """ __metaclass__ = register_obj mappings = {} mapped = {} prec = {} traits = {} classes = {} def __init__(self): self.prec = {} "map precedence of function names to call" # so we will have to play with directed acyclic graphs # detect cycles, etc self.source = '' self.target = '' # list of methods to execute - in general one does not touch it by hand self.meths = set(['apply_core']) # list of mappings extension -> function self.mappings = {} # list of features (see the documentation on traits) self.features = [] # not always a good idea self.m_tasks = [] self.chmod = 0644 self.inst_var = 0 # 0 to prevent installation self.inst_dir = '' if Params.g_install: self.inst_files = [] # lazy list of tuples representing the files to install # kind of private, beware of what you put in it, also, the contents are consumed self.allnodes = [] self.env = Params.g_build.m_allenvs['default'].copy() self.m_posted = 0 self.path = Params.g_build.m_curdirnode # emulate chdir when reading scripts self.name = '' # give a name to the target (static+shlib with the same targetname ambiguity) g_allobjs.append(self) def __str__(self): return ("" % (self.name or self.target, self.__class__.__name__, str(self.path))) def __setattr__(self, name, attr): real = typos.get(name, name) if real != name: Params.warning('typo %s -> %s' % (name, real)) if Params.g_verbose > 0: traceback.print_stack() object.__setattr__(self, real, attr) def to_list(self, value): "helper: returns a list" if type(value) is types.StringType: return value.split() else: return value def addflags(self, var, value): "utility function add self.cxxflags -> env['CXXFLAGS']" 
self.env.append_value(var, self.to_list(value)) def add_method(self, name): "add a method to execute" # TODO adding functions ? self.meths.append(name) def install(self): # FIXME # ambiguity with the install functions # it is often better to install the targets right after they are up-to_date # but this means attaching the install to the task objects if not Params.g_install: return for (name, var, dir, chmod) in self.inst_files: print name, var, dir, chmod # TODO ugly code def install_results(self, var, subdir, task, chmod=0644): debug('install results called', 'object') if not task: return current = Params.g_build.m_curdirnode lst = [a.relpath_gen(current) for a in task.m_outputs] Common.install_files(var, subdir, lst, chmod=chmod, env=self.env) def meth_order(self, *k): "this one adds the methods to the list of methods" assert(len(k) > 1) n = len(k) - 1 for i in xrange(n): f1 = k[i] f2 = k[i+1] try: self.prec[f2].append(f1) except: self.prec[f2] = [f1] if not f1 in self.meths: self.meths.append(f1) if not f2 in self.meths: self.meths.append(f2) def apply_core(self): # get the list of folders to use by the scanners # all our objects share the same include paths anyway tree = Params.g_build lst = self.to_list(self.source) find_source_lst = self.path.find_source_lst for filename in lst: # if self.mappings or task_gen.mappings contains a file of the same name x = self.get_hook(filename) if x: x(self, filename) node = find_source_lst(Utils.split_path(filename)) if not node: fatal("source not found: %s in %s" % (filename, str(self.path))) self.allnodes.append(node) while self.allnodes: node = self.allnodes.pop() # self.mappings or task_gen.mappings map the file extension to a function filename = node.m_name k = max(0, filename.rfind('.')) x = self.get_hook(filename[k:]) if not x: raise TypeError, "Do not know how to process %s in %s, mappings are %s" % \ (str(node), str(self.__class__), str(self.__class__.mappings)) x(self, node) def apply(self): "order the methods to 
execute using self.prec or task_gen.prec" dct = self.__class__.__dict__ keys = self.meths # add the methods listed in the features for x in self.features: keys.update(task_gen.traits[x]) # copy the precedence table with the keys in self.meths prec = {} prec_tbl = self.prec or task_gen.prec for x in prec_tbl: if x in keys: prec[x] = prec_tbl[x] # elements disconnected tmp = [] for a in prec: for x in prec.values(): if a in x: break else: tmp.append(a) # topological sort out = [] while tmp: e = tmp.pop() if e in keys: out.append(e) try: nlst = prec[e] except KeyError: pass else: del prec[e] for x in nlst: for y in prec: if x in prec[y]: break else: tmp.append(x) if prec: fatal("graph has a cycle" % str(prec)) out.reverse() self.meths = out # then we run the methods in order for x in out: v = self.get_meth(x) debug("apply "+x, 'task_gen') v() def post(self): "runs the code to create the tasks, do not subclass" if not self.name: self.name = self.target if self.m_posted: error("OBJECT ALREADY POSTED") return self.apply() debug("posted %s" % self.name, 'object') self.m_posted = 1 def get_hook(self, ext): map = self.mappings for x in self.mappings: if x == ext: return map[x] map = task_gen.mappings for x in map: if x == ext: return map[x] return None def get_meth(self, name): try: return getattr(self, name) except AttributeError: raise AttributeError, "tried to retrieve %s which is not a valid method" % name def create_task(self, type, env=None, nice=None): task = Task.Task(type, env or self.env) if nice: task.prio = nice self.m_tasks.append(task) return task def find_sources_in_dirs(self, dirnames, excludes=[], exts=[]): "subclass if necessary" lst = [] excludes = self.to_list(excludes) #make sure dirnames is a list helps with dirnames with spaces dirnames = self.to_list(dirnames) ext_lst = exts or self.mappings.keys() + task_gen.mappings.keys() # FIXME the following two lines should be removed try: ext_lst += self.s_default_ext except AttributeError: pass for name in 
dirnames: anode = self.path.ensure_node_from_lst(Utils.split_path(name)) Params.g_build.rescan(anode) for file in anode.files(): (base, ext) = os.path.splitext(file.m_name) if ext in ext_lst: s = file.relpath(self.path) if not s in lst: if s in excludes: continue lst.append(s) lst.sort() self.source = self.to_list(self.source) if not self.source: self.source = lst else: self.source += lst def clone(self, env): newobj = copy.deepcopy(self) newobj.path = self.path if type(env) is types.StringType: newobj.env = Params.g_build.m_allenvs[env] else: newobj.env = env g_allobjs.append(newobj) return newobj def declare_extension(var, func): if type(var) is types.ListType: for x in var: task_gen.mappings[x] = func elif type(var) is types.StringType: task_gen.mappings[var] = func else: raise TypeError('declare extension takes either a list or a string %s' % str(var)) task_gen.mapped[func.__name__] = func def declare_order(*k): assert(len(k) > 1) n = len(k) - 1 for i in xrange(n): f1 = k[i] f2 = k[i+1] try: if not f1 in task_gen.prec[f2]: task_gen.prec[f2].append(f1) except: task_gen.prec[f2] = [f1] def declare_chain(name='', action='', ext_in=[], ext_out='', reentrant=1, color='BLUE', prio=40, install=0): """ see Tools/flex.py for an example while i do not like such wrappers, some people really do """ if type(action) == types.StringType: Action.simple_action(name, action, color=color, prio=prio) else: name = action.name def x_file(self, node): if type(ext_out) == types.StringType: ext = ext_out else: ext = ext_out(self, node) if type(ext) == types.StringType: out_source = node.change_ext(ext) if reentrant: self.allnodes.append(out_source) elif type(ext) == types.ListType: out_source = [node.change_ext(x) for x in ext] if reentrant: for i in xrange(reentrant): self.allnodes.append(out_source[i]) else: fatal("do not know how to process %s" % str(ext)) tsk = self.create_task(name) tsk.set_inputs(node) tsk.set_outputs(out_source) if Params.g_install and install: tsk.install = 
install declare_extension(ext_in, x_file) def add_feature(name, methods): lst = Utils.to_list(methods) try: l = task_gen.traits[name] except KeyError: l = set() task_gen.traits[name] = l l.update(lst) # decorators follow def taskgen(f): setattr(task_gen, f.__name__, f) def feature(name): def deco(f): #print name, f try: l = task_gen.traits[name] except KeyError: l = set() task_gen.traits[name] = l l.update([f.__name__]) return f return deco def before(fun_name): def deco(f): try: if not f.__name__ in task_gen.prec[fun_name]: task_gen.prec[fun_name].append(f.__name__) except KeyError: task_gen.prec[fun_name] = [f.__name__] return f return deco def after(fun_name): def deco(f): try: if not fun_name in task_gen.prec[f.__name__]: task_gen.prec[f.__name__].append(fun_name) except KeyError: task_gen.prec[f.__name__] = [fun_name] return f return deco def extension(var): if type(var) is types.ListType: pass elif type(var) is types.StringType: var = [var] else: raise TypeError('declare extension takes either a list or a string %s' % str(var)) def deco(f): for x in var: task_gen.mappings[x] = f task_gen.mapped[f.__name__] = f return f return deco lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Task.py0000664000175000017500000003202210771525014021134 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! 
/usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2005-2008 (ita) "Atomic operations that create nodes or execute commands" import os, types, shutil try: from hashlib import md5 except ImportError: from md5 import md5 import Params, Scan, Action, Runner, Common from Params import debug, error, warning class TaskManager(object): """The manager is attached to the build object, it holds a list of TaskGroup Each TaskGroup contains a map(priority, list of tasks)""" def __init__(self): self.groups = [] self.idx = 0 self.tasks_done = [] def flush(self): for k in self.groups: k.flush() def add_group(self, name=''): if not name: size = len(self.groups) name = 'group-%d' % size if not self.groups: self.groups = [TaskGroup(name)] return if not self.groups[0].tasks: warning('add_group: an empty group is already present') return self.groups = self.groups + [TaskGroup(name)] def add_task(self, task): if not self.groups: self.add_group('group-0') task.m_idx = self.idx self.idx += 1 self.groups[-1].add_task(task) def total(self): total = 0 if not self.groups: return 0 for group in self.groups: total += len(group.tasks) #for p in group.prio: # total += len(group.prio[p]) return total def debug(self): for i in self.groups: print "-----group-------", i.name for j in i.prio: print "prio: ", j, str(i.prio[j]) def add_finished(self, tsk): self.tasks_done.append(tsk) # TODO we could install using threads here if Params.g_install and hasattr(tsk, 'install'): d = tsk.install if type(d) is types.FunctionType: d(tsk) elif type(d) is types.StringType: if not tsk.env()[d]: return lst = [a.relpath_gen(Params.g_build.m_srcnode) for a in tsk.m_outputs] Common.install_files(tsk.env()[d], '', lst, chmod=0644, env=tsk.env()) else: if not d['var']: return lst = [a.relpath_gen(Params.g_build.m_srcnode) for a in tsk.m_outputs] if d.get('src', 0): lst += [a.relpath_gen(Params.g_build.m_srcnode) for a in tsk.m_inputs] # TODO ugly hack if d.get('as', ''): Common.install_as(d['var'], d['dir']+d['as'], 
lst[0], chmod=d.get('chmod', 0644), env=tsk.env()) else: Common.install_files(d['var'], d['dir'], lst, chmod=d.get('chmod', 0644), env=tsk.env()) class TaskGroup(object): "A TaskGroup maps priorities (integers) to lists of tasks" def __init__(self, name): self.name = name self.info = '' self.tasks = [] self.prio = {} def add_task(self, task): try: self.tasks.append(task) except KeyError: self.tasks = [task] def flush(self): # FIXME TODO in the future we will allow to go back in the past for x in self.tasks: try: p = getattr(x, 'prio') except AttributeError: try: p = x.m_action.prio except AttributeError: p = 100 try: self.prio[p].append(x) except KeyError: self.prio[p] = [x] class TaskBase(object): "TaskBase is the base class for task objects" def __init__(self, normal=1): self.m_display = '' self.m_hasrun=0 manager = Params.g_build.task_manager if normal: manager.add_task(self) else: self.m_idx = manager.idx manager.idx += 1 def may_start(self): "non-zero if the task is ready" return 1 def must_run(self): "0 if the task does not need to run" return 1 def prepare(self): "prepare the task for further processing" pass def update_stat(self): "update the dependency tree (node stats)" pass def debug_info(self): "return debug info" return '' def debug(self): "prints the debug info" pass def run(self): "process the task" pass def color(self): "color to use for the console messages" return 'BLUE' def set_display(self, v): self.m_display = v def get_display(self): return self.m_display class Task(TaskBase): "The most common task, it has input and output nodes" def __init__(self, action_name, env, normal=1, prio=None): TaskBase.__init__(self, normal=normal) # name of the action associated to this task type self.m_action = Action.g_actions[action_name] if not (prio is None): self.prio = prio # environment in use self.m_env = env # inputs and outputs are nodes # use setters when possible self.m_inputs = [] self.m_outputs = [] self.m_deps_nodes = [] self.m_run_after = [] # 
Additionally, you may define the following #self.dep_vars = 'PREFIX DATADIR' #self.m_scanner = some_scanner_object def env(self): # TODO IDEA in the future, attach the task generator instead of the env return self.m_env def __repr__(self): return "".join(['\n\t{task: ', self.m_action.m_name, " ", ",".join([x.m_name for x in self.m_inputs]), " -> ", ",".join([x.m_name for x in self.m_outputs]), '}']) def set_inputs(self, inp): if type(inp) is types.ListType: self.m_inputs += inp else: self.m_inputs.append(inp) def set_outputs(self, out): if type(out) is types.ListType: self.m_outputs += out else: self.m_outputs.append(out) def set_run_after(self, task): "set (scheduler) dependency on another task" # TODO: handle list or object assert isinstance(task, TaskBase) self.m_run_after.append(task) def get_run_after(self): try: return self.m_run_after except AttributeError: return [] def add_file_dependency(self, filename): "TODO user-provided file dependencies" node = Params.g_build.m_current.find_build(filename) self.m_deps_nodes.append(node) #------------ users are probably less interested in the following methods --------------# def signature(self): # compute the result one time, and suppose the scanner.get_signature will give the good result try: return self.sign_all except AttributeError: pass env = self.env() tree = Params.g_build m = md5() # TODO maybe we could split this dep sig into two parts (nodes, dependencies) # this would only help for debugging though dep_sig = Params.sig_nil scan = getattr(self, 'm_scanner', None) if scan: dep_sig = scan.get_signature(self) m.update(dep_sig) else: # compute the signature from the inputs (no scanner) for x in self.m_inputs: v = tree.m_tstamp_variants[x.variant(env)][x] dep_sig = hash( (dep_sig, v) ) m.update(v) # manual dependencies, they can slow down the builds try: additional_deps = tree.deps_man for x in self.m_inputs + self.m_outputs: try: d = additional_deps[x] except KeyError: continue if callable(d): d = d() # 
dependency is a function, call it dep_sig = hash( (dep_sig, d) ) m.update(d) except AttributeError: pass # dependencies on the environment vars fun = getattr(self.m_action, 'signature', None) if fun: act_sig = self.m_action.signature(self) else: act_sig = env.sign_vars(self.m_action.m_vars) m.update(act_sig) # additional variable dependencies, if provided var_sig = None dep_vars = getattr(self, 'dep_vars', None) if dep_vars: var_sig = env.sign_vars(dep_vars) m.update(var_sig) # additional nodes to depend on, if provided node_sig = Params.sig_nil dep_nodes = getattr(self, 'dep_nodes', []) for x in dep_nodes: variant = x.variant(env) v = tree.m_tstamp_variants[variant][x] node_sig = hash( (node_sig, v) ) m.update(v) # we now have the array of signatures ret = m.digest() self.cache_sig = [ret, dep_sig, act_sig, var_sig, node_sig] # TODO can be dangerous self.sign_all = ret return ret def may_start(self): "wait for other tasks to complete" if (not self.m_inputs) or (not self.m_outputs): if not (not self.m_inputs) and (not self.m_outputs): error("potentially grave error, task is invalid : no inputs or outputs") self.debug() # the scanner has its word to say scan = getattr(self, 'm_scanner', None) if scan: fun = getattr(scan, 'may_start', None) if fun: if not fun(self): return 0 # this is a dependency using the scheduler, as opposed to hash-based ones for t in self.get_run_after(): if not t.m_hasrun: return 0 return 1 def must_run(self): "see if the task must be run or not" #return 0 # benchmarking env = self.env() tree = Params.g_build # tasks that have no inputs or outputs are run each time if not self.m_inputs and not self.m_outputs: self.m_dep_sig = Params.sig_nil return 1 # look at the previous signature first node = self.m_outputs[0] variant = node.variant(env) try: time = tree.m_tstamp_variants[variant][node] except KeyError: debug("task #%d should run as the first node does not exist" % self.m_idx, 'task') try: new_sig = self.signature() except KeyError: print 
"TODO - computing the signature failed" return 1 ret = self.can_retrieve_cache(new_sig) return not ret key = hash( (variant, node, time, getattr(self, 'm_scanner', self).__class__.__name__) ) prev_sig = tree.m_sig_cache[key][0] #print "prev_sig is ", prev_sig new_sig = self.signature() # debug if asked to if Params.g_zones: self.debug_why(tree.m_sig_cache[key]) if new_sig != prev_sig: # try to retrieve the file from the cache ret = self.can_retrieve_cache(new_sig) return not ret return 0 def update_stat(self): "called after a sucessful task run" tree = Params.g_build env = self.env() sig = self.signature() cnt = 0 for node in self.m_outputs: variant = node.variant(env) #if node in tree.m_tstamp_variants[variant]: # print "variant is ", variant # print "self sig is ", Params.view_sig(tree.m_tstamp_variants[variant][node]) # check if the node exists .. os.stat(node.abspath(env)) # important, store the signature for the next run tree.m_tstamp_variants[variant][node] = sig # We could re-create the signature of the task with the signature of the outputs # in practice, this means hashing the output files # this is unnecessary if Params.g_cache_global: ssig = sig.encode('hex') dest = os.path.join(Params.g_cache_global, ssig+'-'+str(cnt)) try: shutil.copy2(node.abspath(env), dest) except IOError: warning('could not write the file to the cache') cnt += 1 # keep the signatures in the first node node = self.m_outputs[0] variant = node.variant(env) time = tree.m_tstamp_variants[variant][node] key = hash( (variant, node, time, getattr(self, 'm_scanner', self).__class__.__name__) ) val = self.cache_sig tree.set_sig_cache(key, val) self.m_executed=1 def can_retrieve_cache(self, sig): """Retrieve build nodes from the cache - the file time stamps are updated for cleaning the least used files from the cache dir - be careful when overriding""" if not Params.g_cache_global: return None if Params.g_options.nocache: return None env = self.env() sig = self.signature() cnt = 0 for node in 
self.m_outputs: variant = node.variant(env) ssig = sig.encode('hex') orig = os.path.join(Params.g_cache_global, ssig+'-'+str(cnt)) try: shutil.copy2(orig, node.abspath(env)) os.utime(orig, None) # mark the cache file as used recently (modified) except (OSError, IOError): debug("failed retrieving file", 'task') return None else: cnt += 1 Params.g_build.m_tstamp_variants[variant][node] = sig if not Runner.g_quiet: Params.pprint('GREEN', 'restored from cache %s' % node.bldpath(env)) return 1 def prepare(self): try: self.m_action.prepare(self) except AttributeError: pass def run(self): return self.m_action.run(self) def get_display(self): if self.m_display: return self.m_display self.m_display=self.m_action.get_str(self) return self.m_display def color(self): return self.m_action.m_color def debug_info(self): ret = [] ret.append('-- task details begin --') ret.append('action: %s' % str(self.m_action)) ret.append('idx: %s' % str(self.m_idx)) ret.append('source: %s' % str(self.m_inputs)) ret.append('target: %s' % str(self.m_outputs)) ret.append('-- task details end --') return '\n'.join(ret) def debug(self, level=0): fun = Params.debug if level>0: fun = Params.error fun(self.debug_info()) def debug_why(self, old_sigs): "explains why a task is run" new_sigs = self.cache_sig v = Params.view_sig debug("Task %s must run: %s" % (self.m_idx, old_sigs[0] != new_sigs[0]), 'task') if (new_sigs[1] != old_sigs[1]): debug(' -> A source file (or a dependency) has changed %s %s' % (v(old_sigs[1]), v(new_sigs[1])), 'task') if (new_sigs[2] != old_sigs[2]): debug(' -> An environment variable has changed %s %s' % (v(old_sigs[2]), v(new_sigs[2])), 'task') if (new_sigs[3] != old_sigs[3]): debug(' -> A manual dependency has changed %s %s' % (v(old_sigs[3]), v(new_sigs[3])), 'task') if (new_sigs[4] != old_sigs[4]): debug(' -> A user-given environment variable has changed %s %s' % (v(old_sigs[4]), v(new_sigs[4])), 'task') class TaskCmd(TaskBase): "TaskCmd executes commands. 
Instances always execute their function" def __init__(self, fun, env): TaskBase.__init__(self) self.fun = fun self.m_env = env def prepare(self): self.m_display = "* executing: %s" % self.fun.__name__ def debug_info(self): return 'TaskCmd:fun %s' % self.fun.__name__ def debug(self): return 'TaskCmd:fun %s' % self.fun.__name__ def run(self): self.fun(self) def env(self): return self.m_env lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Constants.py0000664000175000017500000000110010771525014022177 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # Yinon dot me gmail 2008 # maintainer the version number is updated from the top-level wscript file HEXVERSION = 0x10303 ABI = 2 CACHE_DIR = 'c4che' CACHE_SUFFIX = '.cache.py' DBFILE = '.wafpickle-%d' % ABI WSCRIPT_FILE = 'wscript' WSCRIPT_BUILD_FILE = 'wscript_build' COMMON_INCLUDES = 'COMMON_INCLUDES' SRCDIR = 'srcdir' BLDDIR = 'blddir' APPNAME = 'APPNAME' VERSION = 'VERSION' DEFINES = 'defines' UNDEFINED = '#undefined#variable#for#defines#' lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/pproc.py0000664000175000017500000005111210771525014021356 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 # borrowed from python 2.5.2c1 # Copyright (c) 2003-2005 by Peter Astrand # Licensed to PSF under a Contributor Agreement. 
import sys mswindows = (sys.platform == "win32") import os import types import traceback import gc class CalledProcessError(Exception): def __init__(self, returncode, cmd): self.returncode = returncode self.cmd = cmd def __str__(self): return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode) if mswindows: import threading import msvcrt if 0: import pywintypes from win32api import GetStdHandle, STD_INPUT_HANDLE, \ STD_OUTPUT_HANDLE, STD_ERROR_HANDLE from win32api import GetCurrentProcess, DuplicateHandle, \ GetModuleFileName, GetVersion from win32con import DUPLICATE_SAME_ACCESS, SW_HIDE from win32pipe import CreatePipe from win32process import CreateProcess, STARTUPINFO, \ GetExitCodeProcess, STARTF_USESTDHANDLES, \ STARTF_USESHOWWINDOW, CREATE_NEW_CONSOLE from win32event import WaitForSingleObject, INFINITE, WAIT_OBJECT_0 else: from _subprocess import * class STARTUPINFO: dwFlags = 0 hStdInput = None hStdOutput = None hStdError = None wShowWindow = 0 class pywintypes: error = IOError else: import select import errno import fcntl import pickle __all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "CalledProcessError"] try: MAXFD = os.sysconf("SC_OPEN_MAX") except: MAXFD = 256 try: False except NameError: False = 0 True = 1 _active = [] def _cleanup(): for inst in _active[:]: if inst.poll(_deadstate=sys.maxint) >= 0: try: _active.remove(inst) except ValueError: pass PIPE = -1 STDOUT = -2 def call(*popenargs, **kwargs): return Popen(*popenargs, **kwargs).wait() def check_call(*popenargs, **kwargs): retcode = call(*popenargs, **kwargs) cmd = kwargs.get("args") if cmd is None: cmd = popenargs[0] if retcode: raise CalledProcessError(retcode, cmd) return retcode def list2cmdline(seq): result = [] needquote = False for arg in seq: bs_buf = [] if result: result.append(' ') needquote = (" " in arg) or ("\t" in arg) or arg == "" if needquote: result.append('"') for c in arg: if c == '\\': bs_buf.append(c) elif c == '"': result.append('\\' * 
len(bs_buf)*2) bs_buf = [] result.append('\\"') else: if bs_buf: result.extend(bs_buf) bs_buf = [] result.append(c) if bs_buf: result.extend(bs_buf) if needquote: result.extend(bs_buf) result.append('"') return ''.join(result) class Popen(object): def __init__(self, args, bufsize=0, executable=None, stdin=None, stdout=None, stderr=None, preexec_fn=None, close_fds=False, shell=False, cwd=None, env=None, universal_newlines=False, startupinfo=None, creationflags=0): _cleanup() self._child_created = False if not isinstance(bufsize, (int, long)): raise TypeError("bufsize must be an integer") if mswindows: if preexec_fn is not None: raise ValueError("preexec_fn is not supported on Windows platforms") if close_fds: raise ValueError("close_fds is not supported on Windows platforms") else: if startupinfo is not None: raise ValueError("startupinfo is only supported on Windows platforms") if creationflags != 0: raise ValueError("creationflags is only supported on Windows platforms") self.stdin = None self.stdout = None self.stderr = None self.pid = None self.returncode = None self.universal_newlines = universal_newlines (p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) = self._get_handles(stdin, stdout, stderr) self._execute_child(args, executable, preexec_fn, close_fds, cwd, env, universal_newlines, startupinfo, creationflags, shell, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) if mswindows: if stdin is None and p2cwrite is not None: os.close(p2cwrite) p2cwrite = None if stdout is None and c2pread is not None: os.close(c2pread) c2pread = None if stderr is None and errread is not None: os.close(errread) errread = None if p2cwrite: self.stdin = os.fdopen(p2cwrite, 'wb', bufsize) if c2pread: if universal_newlines: self.stdout = os.fdopen(c2pread, 'rU', bufsize) else: self.stdout = os.fdopen(c2pread, 'rb', bufsize) if errread: if universal_newlines: self.stderr = os.fdopen(errread, 'rU', bufsize) else: self.stderr = os.fdopen(errread, 'rb', bufsize) def 
_translate_newlines(self, data): data = data.replace("\r\n", "\n") data = data.replace("\r", "\n") return data def __del__(self, sys=sys): if not self._child_created: return self.poll(_deadstate=sys.maxint) if self.returncode is None and _active is not None: _active.append(self) def communicate(self, input=None): if [self.stdin, self.stdout, self.stderr].count(None) >= 2: stdout = None stderr = None if self.stdin: if input: self.stdin.write(input) self.stdin.close() elif self.stdout: stdout = self.stdout.read() elif self.stderr: stderr = self.stderr.read() self.wait() return (stdout, stderr) return self._communicate(input) if mswindows: def _get_handles(self, stdin, stdout, stderr): if stdin is None and stdout is None and stderr is None: return (None, None, None, None, None, None) p2cread, p2cwrite = None, None c2pread, c2pwrite = None, None errread, errwrite = None, None if stdin is None: p2cread = GetStdHandle(STD_INPUT_HANDLE) if p2cread is not None: pass elif stdin is None or stdin == PIPE: p2cread, p2cwrite = CreatePipe(None, 0) p2cwrite = p2cwrite.Detach() p2cwrite = msvcrt.open_osfhandle(p2cwrite, 0) elif isinstance(stdin, int): p2cread = msvcrt.get_osfhandle(stdin) else: p2cread = msvcrt.get_osfhandle(stdin.fileno()) p2cread = self._make_inheritable(p2cread) if stdout is None: c2pwrite = GetStdHandle(STD_OUTPUT_HANDLE) if c2pwrite is not None: pass elif stdout is None or stdout == PIPE: c2pread, c2pwrite = CreatePipe(None, 0) c2pread = c2pread.Detach() c2pread = msvcrt.open_osfhandle(c2pread, 0) elif isinstance(stdout, int): c2pwrite = msvcrt.get_osfhandle(stdout) else: c2pwrite = msvcrt.get_osfhandle(stdout.fileno()) c2pwrite = self._make_inheritable(c2pwrite) if stderr is None: errwrite = GetStdHandle(STD_ERROR_HANDLE) if errwrite is not None: pass elif stderr is None or stderr == PIPE: errread, errwrite = CreatePipe(None, 0) errread = errread.Detach() errread = msvcrt.open_osfhandle(errread, 0) elif stderr == STDOUT: errwrite = c2pwrite elif 
isinstance(stderr, int): errwrite = msvcrt.get_osfhandle(stderr) else: errwrite = msvcrt.get_osfhandle(stderr.fileno()) errwrite = self._make_inheritable(errwrite) return (p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) def _make_inheritable(self, handle): return DuplicateHandle(GetCurrentProcess(), handle, GetCurrentProcess(), 0, 1, DUPLICATE_SAME_ACCESS) def _find_w9xpopen(self): w9xpopen = os.path.join(os.path.dirname(GetModuleFileName(0)), "w9xpopen.exe") if not os.path.exists(w9xpopen): w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix), "w9xpopen.exe") if not os.path.exists(w9xpopen): raise RuntimeError("Cannot locate w9xpopen.exe, which is needed for Popen to work with your shell or platform.") return w9xpopen def _execute_child(self, args, executable, preexec_fn, close_fds, cwd, env, universal_newlines, startupinfo, creationflags, shell, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite): if not isinstance(args, types.StringTypes): args = list2cmdline(args) if startupinfo is None: startupinfo = STARTUPINFO() if None not in (p2cread, c2pwrite, errwrite): startupinfo.dwFlags |= STARTF_USESTDHANDLES startupinfo.hStdInput = p2cread startupinfo.hStdOutput = c2pwrite startupinfo.hStdError = errwrite if shell: startupinfo.dwFlags |= STARTF_USESHOWWINDOW startupinfo.wShowWindow = SW_HIDE comspec = os.environ.get("COMSPEC", "cmd.exe") args = comspec + " /c " + args if (GetVersion() >= 0x80000000L or os.path.basename(comspec).lower() == "command.com"): w9xpopen = self._find_w9xpopen() args = '"%s" %s' % (w9xpopen, args) creationflags |= CREATE_NEW_CONSOLE try: hp, ht, pid, tid = CreateProcess(executable, args, None, None, 1, creationflags, env, cwd, startupinfo) except pywintypes.error, e: raise WindowsError(*e.args) self._child_created = True self._handle = hp self.pid = pid ht.Close() if p2cread is not None: p2cread.Close() if c2pwrite is not None: c2pwrite.Close() if errwrite is not None: errwrite.Close() def poll(self, _deadstate=None): if 
self.returncode is None: if WaitForSingleObject(self._handle, 0) == WAIT_OBJECT_0: self.returncode = GetExitCodeProcess(self._handle) return self.returncode def wait(self): if self.returncode is None: obj = WaitForSingleObject(self._handle, INFINITE) self.returncode = GetExitCodeProcess(self._handle) return self.returncode def _readerthread(self, fh, buffer): buffer.append(fh.read()) def _communicate(self, input): stdout = None stderr = None if self.stdout: stdout = [] stdout_thread = threading.Thread(target=self._readerthread, args=(self.stdout, stdout)) stdout_thread.setDaemon(True) stdout_thread.start() if self.stderr: stderr = [] stderr_thread = threading.Thread(target=self._readerthread, args=(self.stderr, stderr)) stderr_thread.setDaemon(True) stderr_thread.start() if self.stdin: if input is not None: self.stdin.write(input) self.stdin.close() if self.stdout: stdout_thread.join() if self.stderr: stderr_thread.join() if stdout is not None: stdout = stdout[0] if stderr is not None: stderr = stderr[0] if self.universal_newlines and hasattr(file, 'newlines'): if stdout: stdout = self._translate_newlines(stdout) if stderr: stderr = self._translate_newlines(stderr) self.wait() return (stdout, stderr) else: def _get_handles(self, stdin, stdout, stderr): p2cread, p2cwrite = None, None c2pread, c2pwrite = None, None errread, errwrite = None, None if stdin is None: pass elif stdin == PIPE: p2cread, p2cwrite = os.pipe() elif isinstance(stdin, int): p2cread = stdin else: p2cread = stdin.fileno() if stdout is None: pass elif stdout == PIPE: c2pread, c2pwrite = os.pipe() elif isinstance(stdout, int): c2pwrite = stdout else: c2pwrite = stdout.fileno() if stderr is None: pass elif stderr == PIPE: errread, errwrite = os.pipe() elif stderr == STDOUT: errwrite = c2pwrite elif isinstance(stderr, int): errwrite = stderr else: errwrite = stderr.fileno() return (p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) def _set_cloexec_flag(self, fd): try: cloexec_flag = 
fcntl.FD_CLOEXEC except AttributeError: cloexec_flag = 1 old = fcntl.fcntl(fd, fcntl.F_GETFD) fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag) def _close_fds(self, but): for i in xrange(3, MAXFD): if i == but: continue try: os.close(i) except: pass def _execute_child(self, args, executable, preexec_fn, close_fds, cwd, env, universal_newlines, startupinfo, creationflags, shell, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite): if isinstance(args, types.StringTypes): args = [args] else: args = list(args) if shell: args = ["/bin/sh", "-c"] + args if executable is None: executable = args[0] errpipe_read, errpipe_write = os.pipe() self._set_cloexec_flag(errpipe_write) gc_was_enabled = gc.isenabled() gc.disable() try: self.pid = os.fork() except: if gc_was_enabled: gc.enable() raise self._child_created = True if self.pid == 0: try: if p2cwrite: os.close(p2cwrite) if c2pread: os.close(c2pread) if errread: os.close(errread) os.close(errpipe_read) if p2cread: os.dup2(p2cread, 0) if c2pwrite: os.dup2(c2pwrite, 1) if errwrite: os.dup2(errwrite, 2) if p2cread and p2cread not in (0,): os.close(p2cread) if c2pwrite and c2pwrite not in (p2cread, 1): os.close(c2pwrite) if errwrite and errwrite not in (p2cread, c2pwrite, 2): os.close(errwrite) if close_fds: self._close_fds(but=errpipe_write) if cwd is not None: os.chdir(cwd) if preexec_fn: apply(preexec_fn) if env is None: os.execvp(executable, args) else: os.execvpe(executable, args, env) except: exc_type, exc_value, tb = sys.exc_info() exc_lines = traceback.format_exception(exc_type, exc_value, tb) exc_value.child_traceback = ''.join(exc_lines) os.write(errpipe_write, pickle.dumps(exc_value)) os._exit(255) if gc_was_enabled: gc.enable() os.close(errpipe_write) if p2cread and p2cwrite: os.close(p2cread) if c2pwrite and c2pread: os.close(c2pwrite) if errwrite and errread: os.close(errwrite) data = os.read(errpipe_read, 1048576) os.close(errpipe_read) if data != "": os.waitpid(self.pid, 0) child_exception = 
pickle.loads(data) raise child_exception def _handle_exitstatus(self, sts): if os.WIFSIGNALED(sts): self.returncode = -os.WTERMSIG(sts) elif os.WIFEXITED(sts): self.returncode = os.WEXITSTATUS(sts) else: raise RuntimeError("Unknown child exit status!") def poll(self, _deadstate=None): if self.returncode is None: try: pid, sts = os.waitpid(self.pid, os.WNOHANG) if pid == self.pid: self._handle_exitstatus(sts) except os.error: if _deadstate is not None: self.returncode = _deadstate return self.returncode def wait(self): if self.returncode is None: pid, sts = os.waitpid(self.pid, 0) self._handle_exitstatus(sts) return self.returncode def _communicate(self, input): read_set = [] write_set = [] stdout = None stderr = None if self.stdin: self.stdin.flush() if input: write_set.append(self.stdin) else: self.stdin.close() if self.stdout: read_set.append(self.stdout) stdout = [] if self.stderr: read_set.append(self.stderr) stderr = [] input_offset = 0 while read_set or write_set: rlist, wlist, xlist = select.select(read_set, write_set, []) if self.stdin in wlist: bytes_written = os.write(self.stdin.fileno(), buffer(input, input_offset, 512)) input_offset += bytes_written if input_offset >= len(input): self.stdin.close() write_set.remove(self.stdin) if self.stdout in rlist: data = os.read(self.stdout.fileno(), 1024) if data == "": self.stdout.close() read_set.remove(self.stdout) stdout.append(data) if self.stderr in rlist: data = os.read(self.stderr.fileno(), 1024) if data == "": self.stderr.close() read_set.remove(self.stderr) stderr.append(data) if stdout is not None: stdout = ''.join(stdout) if stderr is not None: stderr = ''.join(stderr) if self.universal_newlines and hasattr(file, 'newlines'): if stdout: stdout = self._translate_newlines(stdout) if stderr: stderr = self._translate_newlines(stderr) self.wait() return (stdout, stderr) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/UnitTest.py0000664000175000017500000001354410771525014022021 0ustar alessioalessio#! 
/usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # Carlos Rafael Giani, 2006 """ Unit tests run in the shutdown() method, and for c/c++ programs One should NOT have to give parameters to programs to execute In the shutdown method, add the following code: >>> def shutdown(): ... ut = UnitTest.unit_test() ... ut.run() ... ut.print_results() Each object to use as a unit test must be a program and must have X{obj.unit_test=1} """ import os, sys import Params, Object, Utils import pproc as subprocess class unit_test(object): "Unit test representation" def __init__(self): self.returncode_ok = 0 # Unit test returncode considered OK. All returncodes differing from this one # will cause the unit test to be marked as "FAILED". # The following variables are filled with data by run(). # print_results() uses these for printing the unit test summary, # but if there is need for direct access to the results, # they can be retrieved here, after calling run(). self.num_tests_ok = 0 # Number of successful unit tests self.num_tests_failed = 0 # Number of failed unit tests self.num_tests_err = 0 # Tests that have not even run self.total_num_tests = 0 # Total amount of unit tests self.max_label_length = 0 # Maximum label length (pretty-print the output) self.unit_tests = {} # Unit test dictionary. Key: the label (unit test filename relative # to the build dir), value: unit test filename with absolute path self.unit_test_results = {} # Dictionary containing the unit test results. # Key: the label, value: result (true = success false = failure) self.unit_test_erroneous = {} # Dictionary indicating erroneous unit tests. 
# Key: the label, value: true = unit test has an error false = unit test is ok self.change_to_testfile_dir = False #True if the test file needs to be executed from the same dir self.want_to_see_test_output = False #True to see the stdout from the testfile (for example check suites) self.want_to_see_test_error = False #True to see the stderr from the testfile (for example check suites) self.run_if_waf_does = 'check' #build was the old default def run(self): "Run the unit tests and gather results (note: no output here)" self.num_tests_ok = 0 self.num_tests_failed = 0 self.num_tests_err = 0 self.total_num_tests = 0 self.max_label_length = 0 self.unit_tests = {} self.unit_test_results = {} self.unit_test_erroneous = {} # If waf is not building, don't run anything if not Params.g_commands[self.run_if_waf_does]: return # Gather unit tests to call for obj in Object.g_allobjs: if not hasattr(obj,'unit_test'): continue unit_test = getattr(obj,'unit_test') if not unit_test: continue try: if obj.m_type == 'program': filename = obj.link_task.m_outputs[0].abspath(obj.env) label = obj.link_task.m_outputs[0].bldpath(obj.env) self.max_label_length = max(self.max_label_length, len(label)) self.unit_tests[label] = filename except KeyError: pass self.total_num_tests = len(self.unit_tests) # Now run the unit tests col1=Params.g_colors['GREEN'] col2=Params.g_colors['NORMAL'] Params.pprint('GREEN', 'Running the unit tests') count = 0 result = 1 curdir = os.getcwd() # store the current dir (only if self.change_to_testfile_dir) for label, filename in self.unit_tests.iteritems(): count += 1 line = Utils.progress_line(count, self.total_num_tests, col1, col2) if Params.g_options.progress_bar and line: sys.stdout.write(line) sys.stdout.flush() try: if self.change_to_testfile_dir: os.chdir(os.path.dirname(filename)) kwargs = dict() if not self.want_to_see_test_output: kwargs['stdout'] = subprocess.PIPE # PIPE for ignoring output if not self.want_to_see_test_error: kwargs['stderr'] = 
subprocess.PIPE # PIPE for ignoring output pp = subprocess.Popen(filename, **kwargs) pp.wait() if self.change_to_testfile_dir: os.chdir(curdir) result = int(pp.returncode == self.returncode_ok) if result: self.num_tests_ok += 1 else: self.num_tests_failed += 1 self.unit_test_results[label] = result self.unit_test_erroneous[label] = 0 except OSError: self.unit_test_erroneous[label] = 1 self.num_tests_err += 1 except KeyboardInterrupt: if Params.g_options.progress_bar: sys.stdout.write(Params.g_cursor_off) if Params.g_options.progress_bar: sys.stdout.write(Params.g_cursor_off) def print_results(self): "Pretty-prints a summary of all unit tests, along with some statistics" # If waf is not building, don't output anything if not Params.g_commands[self.run_if_waf_does]: return p = Params.pprint # Early quit if no tests were performed if self.total_num_tests == 0: p('YELLOW', 'No unit tests present') return p('GREEN', 'Running unit tests') print for label, filename in self.unit_tests.iteritems(): err = 0 result = 0 try: err = self.unit_test_erroneous[label] except KeyError: pass try: result = self.unit_test_results[label] except KeyError: pass n = self.max_label_length - len(label) if err: n += 4 elif result: n += 7 else: n += 3 line = '%s %s' % (label, '.' 
* n) print line, if err: p('RED', 'ERROR') elif result: p('GREEN', 'OK') else: p('YELLOW', 'FAILED') percentage_ok = float(self.num_tests_ok) / float(self.total_num_tests) * 100.0 percentage_failed = float(self.num_tests_failed) / float(self.total_num_tests) * 100.0 percentage_erroneous = float(self.num_tests_err) / float(self.total_num_tests) * 100.0 print ''' Successful tests: %i (%.1f%%) Failed tests: %i (%.1f%%) Erroneous tests: %i (%.1f%%) Total number of tests: %i ''' % (self.num_tests_ok, percentage_ok, self.num_tests_failed, percentage_failed, self.num_tests_err, percentage_erroneous, self.total_num_tests) p('GREEN', 'Unit tests finished') lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/__init__.py0000664000175000017500000000015610771525014021774 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! /usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2005 (ita) lv2fil-2.0+20100312.git18130f5a+dfsg0/wafadmin/Environment.py0000664000175000017500000001067610771525014022551 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 #! 
/usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2005 (ita) "Environment representation" import os,types, copy, re import Params from Params import debug, warning re_imp = re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$', re.M) g_cache_max = {} g_idx = 0 class Environment(object): """A safe-to-use dictionary, but do not attach functions to it please (break cPickle) An environment instance can be stored into a file and loaded easily """ def __init__(self): global g_idx self.m_idx = g_idx g_idx += 1 self.m_table={} #self.m_parent = None <- set only if necessary # set the prefix once and for everybody on creation (configuration) self.m_table['PREFIX'] = Params.g_options.prefix def __contains__(self, key): if key in self.m_table: return True try: return self.m_parent.__contains__(key) except AttributeError: return False # m_parent may not exist def set_variant(self, name): self.m_table['_VARIANT_'] = name def variant(self): env = self while 1: try: return env.m_table['_VARIANT_'] except KeyError: try: env = env.m_parent except AttributeError: return 'default' def copy(self): newenv = Environment() newenv.m_parent = self return newenv def __str__(self): return "environment table\n"+str(self.m_table) def __getitem__(self, key): try: return self.m_table[key] except KeyError: try: return self.m_parent[key] except: return [] def __setitem__(self, key, value): self.m_table[key] = value def get_flat(self, key): s = self[key] if not s: return '' elif isinstance(s, list): return ' '.join(s) else: return s def _get_list_value_for_modification(self, key): """Gets a value that must be a list for further modification. The list may be modified inplace and there is no need to "self.m_table[var] = value" afterwards. 
""" try: value = self.m_table[key] except KeyError: try: value = self.m_parent[key] except AttributeError: value = [] if isinstance(value, list): value = copy.copy(value) else: value = [value] self.m_table[key] = value return value else: if isinstance(value, list): return value # no need to copy the list, it is not borrowed <- TODO ?? else: value = [value] self.m_table[key] = value return value def append_value(self, var, value): current_value = self._get_list_value_for_modification(var) if isinstance(value, list): current_value.extend(value) else: current_value.append(value) def prepend_value(self, var, value): current_value = self._get_list_value_for_modification(var) if isinstance(value, list): current_value = value + current_value # a new list: update the dictionary entry self.m_table[var] = current_value else: current_value.insert(0, value) # prepend unique would be ambiguous def append_unique(self, var, value): current_value = self._get_list_value_for_modification(var) if isinstance(value, list): for value_item in value: if value_item not in current_value: current_value.append(value_item) else: if value not in current_value: current_value.append(value) def store(self, filename): "Write the variables into a file" file = open(filename, 'w') # compute a merged table table_list = [] env = self while 1: table_list.insert(0, env.m_table) try: env = env.m_parent except AttributeError: break merged_table = dict() for table in table_list: merged_table.update(table) keys = merged_table.keys() keys.sort() for k in keys: file.write('%s = %r\n' % (k, merged_table[k])) file.close() def load(self, filename): "Retrieve the variables from a file" tbl = self.m_table file = open(filename, 'r') code = file.read() file.close() for m in re_imp.finditer(code): g = m.group tbl[g(2)] = eval(g(3)) debug(self.m_table, 'env') def get_destdir(self): "return the destdir, useful for installing" if self.__getitem__('NOINSTALL'): return '' return Params.g_options.destdir def sign_vars(env, 
vars_list): " ['CXX', ..] -> [env['CXX'], ..]" # ccroot objects use the same environment for building the .o at once # the same environment and the same variables are used s = str([env.m_idx]+vars_list) try: return g_cache_max[s] except KeyError: pass lst = [env.get_flat(a) for a in vars_list] ret = Params.h_list(lst) if Params.g_zones: debug("%s %s" % (Params.view_sig(ret), str(lst)), 'envhash') # next time g_cache_max[s] = ret return ret lv2fil-2.0+20100312.git18130f5a+dfsg0/lv2filter.c0000644000175000017500000001524611346337452020164 0ustar alessioalessio/* -*- Mode: C ; c-basic-offset: 2 -*- */ /***************************************************************************** * * Copyright (C) 2006,2007,2008,2009 Nedko Arnaudov * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; version 2 of the License * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 
* *****************************************************************************/ #include #include #include #include #include #include #include #include "lv2filter.h" #include "filter.h" #define LOG_LEVEL LOG_LEVEL_ERROR #include "log.h" #define BANDS_COUNT 4 #define LV2_PORT_MONO_AUDIO_IN 0 #define LV2_PORT_MONO_AUDIO_OUT 1 #define LV2_MONO_AUDIO_PORT_COUNT 2 #define LV2_PORTS_COUNT_MONO (LV2_MONO_AUDIO_PORT_COUNT + GLOBAL_PARAMETERS_COUNT + BANDS_COUNT * BAND_PARAMETERS_COUNT) #define LV2_PORT_LEFT_AUDIO_IN 0 #define LV2_PORT_RIGHT_AUDIO_IN 1 #define LV2_PORT_LEFT_AUDIO_OUT 2 #define LV2_PORT_RIGHT_AUDIO_OUT 3 #define LV2_STEREO_AUDIO_PORT_COUNT 4 #define LV2_PORTS_COUNT_STEREO (LV2_STEREO_AUDIO_PORT_COUNT + GLOBAL_PARAMETERS_COUNT + BANDS_COUNT * BAND_PARAMETERS_COUNT) struct lv2filter { bool stereo; filter_handle filter; filter_handle filter_right; char * bundle_path; const float * audio_in; const float * audio_in_right; float * audio_out; float * audio_out_right; const LV2_Feature * const * host_features; }; LV2_Handle lv2filter_instantiate( const LV2_Descriptor * descriptor, double sample_rate, const char * bundle_path, const LV2_Feature * const * host_features) { struct lv2filter * lv2filter_ptr; const LV2_Feature * const * feature_ptr_ptr; LOG_DEBUG("lv2filter_create_plugin_instance() called."); LOG_DEBUG("uri = \"%s\"", descriptor->URI); LOG_DEBUG("sample_rate = %f", sample_rate); LOG_DEBUG("bundle_path = \"%s\"", bundle_path); feature_ptr_ptr = host_features; while (*feature_ptr_ptr) { LOG_DEBUG("Host feature <%s> detected", (*feature_ptr_ptr)->URI); feature_ptr_ptr++; } lv2filter_ptr = malloc(sizeof(struct lv2filter)); if (lv2filter_ptr == NULL) { goto fail; } if (strcmp(descriptor->URI, LV2FILTER_STEREO_URI) == 0) { lv2filter_ptr->stereo = true; } else if (strcmp(descriptor->URI, LV2FILTER_MONO_URI) == 0) { lv2filter_ptr->stereo = false; } else { assert(false); goto fail_free_instance; } lv2filter_ptr->host_features = host_features; 
lv2filter_ptr->bundle_path = strdup(bundle_path); if (lv2filter_ptr->bundle_path == NULL) { goto fail_free_instance; } if (!filter_create(sample_rate, BANDS_COUNT, &lv2filter_ptr->filter)) { goto fail_free_bundle_path; } if (lv2filter_ptr->stereo) { if (!filter_create(sample_rate, BANDS_COUNT, &lv2filter_ptr->filter_right)) { goto fail_destroy_filter; } } return (LV2_Handle)lv2filter_ptr; fail_destroy_filter: filter_destroy(lv2filter_ptr->filter); fail_free_bundle_path: free(lv2filter_ptr->bundle_path); fail_free_instance: free(lv2filter_ptr); fail: return NULL; } #define lv2filter_ptr ((struct lv2filter *)instance) /* The run() callback. This is the function that gets called by the host when it wants to run the plugin. The parameter is the number of sample frames to process. */ void lv2filter_run( LV2_Handle instance, uint32_t samples_count) { LOG_DEBUG("lv2filter_run"); filter_run( lv2filter_ptr->filter, lv2filter_ptr->audio_in, lv2filter_ptr->audio_out, samples_count); if (lv2filter_ptr->stereo) { filter_run( lv2filter_ptr->filter_right, lv2filter_ptr->audio_in_right, lv2filter_ptr->audio_out_right, samples_count); } } void lv2filter_cleanup( LV2_Handle instance) { filter_destroy(lv2filter_ptr->filter); if (lv2filter_ptr->stereo) { filter_destroy(lv2filter_ptr->filter_right); } free(lv2filter_ptr->bundle_path); free(lv2filter_ptr); } void lv2filter_connect_port( LV2_Handle instance, uint32_t port, void * data_location) { LOG_DEBUG("lv2filter_connect_port %u %p", (unsigned int)port, data_location); if (lv2filter_ptr->stereo) { if (port >= LV2_PORTS_COUNT_STEREO) { assert(0); return; } if (port == LV2_PORT_LEFT_AUDIO_IN) { lv2filter_ptr->audio_in = data_location; } else if (port == LV2_PORT_LEFT_AUDIO_OUT) { lv2filter_ptr->audio_out = data_location; } else if (port == LV2_PORT_RIGHT_AUDIO_IN) { lv2filter_ptr->audio_in_right = data_location; } else if (port == LV2_PORT_RIGHT_AUDIO_OUT) { lv2filter_ptr->audio_out_right = data_location; } else { assert(port >= 
LV2_STEREO_AUDIO_PORT_COUNT); port -= LV2_STEREO_AUDIO_PORT_COUNT; if (port < GLOBAL_PARAMETERS_COUNT) { filter_connect_global_parameter(lv2filter_ptr->filter, port, data_location); filter_connect_global_parameter(lv2filter_ptr->filter_right, port, data_location); } else { assert(port >= GLOBAL_PARAMETERS_COUNT); port -= GLOBAL_PARAMETERS_COUNT; filter_connect_band_parameter(lv2filter_ptr->filter, port / BANDS_COUNT, port % BANDS_COUNT, data_location); filter_connect_band_parameter(lv2filter_ptr->filter_right, port / BANDS_COUNT, port % BANDS_COUNT, data_location); } } } else { if (port >= LV2_PORTS_COUNT_MONO) { assert(0); return; } if (port == LV2_PORT_MONO_AUDIO_IN) { lv2filter_ptr->audio_in = data_location; } else if (port == LV2_PORT_MONO_AUDIO_OUT) { lv2filter_ptr->audio_out = data_location; } else { port -= LV2_MONO_AUDIO_PORT_COUNT; if (port < GLOBAL_PARAMETERS_COUNT) { filter_connect_global_parameter(lv2filter_ptr->filter, port, data_location); } else { port -= GLOBAL_PARAMETERS_COUNT; filter_connect_band_parameter(lv2filter_ptr->filter, port / BANDS_COUNT, port % BANDS_COUNT, data_location); } } } } const void * lv2filter_extension_data( const char * URI) { return NULL; } lv2fil-2.0+20100312.git18130f5a+dfsg0/README0000644000175000017500000000207311346337452016761 0ustar alessioalessio= Overview = Stereo and mono LV2 plugins, four-band parametric equalisers. Each section has an active/bypass switch, frequency, bandwidth and gain controls. There is also a global bypass switch and gain control. = DSP = The 2nd order resonant filters are implemented using a Mitra-Regalia style lattice filter, which has the nice property of being stable even while parameters are being changed. All switches and controls are internally smoothed, so they can be used 'live' whithout any clicks or zipper noises. This should make this plugin a good candidate for use in systems that allow automation of plugin control ports, such as Ardour, or for stage use. 
The DSP code is written by Fons Adriaensen = GUI = The GUI provides knobs and toggle buttons for tweaking filter parameters. It also provides frequency response widget with differently coloured curve for each section and separate curve for total equalization effect. The GUI uses the External UI extension. lv2rack (part of zynjacku) supports this extension. Ardour-2.8 needs patch to support the external UI extension. lv2fil-2.0+20100312.git18130f5a+dfsg0/COPYING0000644000175000017500000004311011346337452017131 0ustar alessioalessio GNU GENERAL PUBLIC LICENSE Version 2, June 1991 Copyright (C) 1989, 1991 Free Software Foundation, Inc. 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Library General Public License instead.) You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. 
These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software. Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations. Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all. The precise terms and conditions for copying, distribution and modification follow. GNU GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. 
(Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you". Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does. 1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License. 
c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program. In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. 
You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following: a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.) The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. 
If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code. 4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it. 6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License. 7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. 
If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 9. 
The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation. 10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 12. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA Also add information on how to contact you by electronic and paper mail. 
If the program is interactive, make it output a short notice like this when it starts in an interactive mode: Gnomovision version 69, Copyright (C) year name of author Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program. You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names: Yoyodyne, Inc., hereby disclaims all copyright interest in the program `Gnomovision' (which makes passes at compilers) written by James Hacker. , 1 April 1989 Ty Coon, President of Vice This General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Library General Public License instead of this License. lv2fil-2.0+20100312.git18130f5a+dfsg0/lv2_external_ui.h0000644000175000017500000000612411346337452021355 0ustar alessioalessio/* -*- Mode: C ; c-basic-offset: 2 -*- */ /***************************************************************************** * * This work is in public domain. * * This file is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * * If you have questions, contact Nedko Arnaudov or * ask in #lad channel, FreeNode IRC network. 
* *****************************************************************************/ #ifndef LV2_EXTERNAL_UI_H__5AFE09A5_0FB7_47AF_924E_2AF0F8DE8873__INCLUDED #define LV2_EXTERNAL_UI_H__5AFE09A5_0FB7_47AF_924E_2AF0F8DE8873__INCLUDED /** UI extension suitable for out-of-process UIs */ #define LV2_EXTERNAL_UI_URI "http://lv2plug.in/ns/extensions/ui#external" #ifdef __cplusplus extern "C" { #endif #if 0 } /* Adjust editor indent */ #endif /** * When LV2_EXTERNAL_UI_URI UI is instantiated, the returned * LV2UI_Widget handle must be cast to pointer to struct lv2_external_ui. * UI is created in invisible state. */ struct lv2_external_ui { /** * Host calls this function regulary. UI library implementing the * callback may do IPC or redraw the UI. * * @param _this_ the UI context */ void (* run)(struct lv2_external_ui * _this_); /** * Host calls this function to make the plugin UI visible. * * @param _this_ the UI context */ void (* show)(struct lv2_external_ui * _this_); /** * Host calls this function to make the plugin UI invisible again. * * @param _this_ the UI context */ void (* hide)(struct lv2_external_ui * _this_); }; #define LV2_EXTERNAL_UI_RUN(ptr) (ptr)->run(ptr) #define LV2_EXTERNAL_UI_SHOW(ptr) (ptr)->show(ptr) #define LV2_EXTERNAL_UI_HIDE(ptr) (ptr)->hide(ptr) /** * On UI instantiation, host must supply LV2_EXTERNAL_UI_URI * feature. LV2_Feature::data must be pointer to struct lv2_external_ui_host. */ struct lv2_external_ui_host { /** * Callback that plugin UI will call * when UI (GUI window) is closed by user. * This callback wil; be called during execution of lv2_external_ui::run() * (i.e. not from background thread). * * After this callback is called, UI is defunct. Host must call * LV2UI_Descriptor::cleanup(). If host wants to make the UI visible * again UI must be reinstantiated. 
* * @param controller Host context associated with plugin UI, as * supplied to LV2UI_Descriptor::instantiate() */ void (* ui_closed)(LV2UI_Controller controller); /** * Optional (may be NULL) "user friendly" identifier which the UI * may display to allow a user to easily associate this particular * UI instance with the correct plugin instance as it is represented * by the host (e.g. "track 1" or "channel 4"). * * If supplied by host, the string will be referenced only during * LV2UI_Descriptor::instantiate() */ const char * plugin_human_id; }; #if 0 { /* Adjust editor indent */ #endif #ifdef __cplusplus } /* extern "C" */ #endif #endif /* #ifndef LV2_EXTERNAL_UI_H__5AFE09A5_0FB7_47AF_924E_2AF0F8DE8873__INCLUDED */ lv2fil-2.0+20100312.git18130f5a+dfsg0/.gitignore0000644000175000017500000000005611346337452020070 0ustar alessioalessio*.o *.so /.waf-* /build /.lock-wscript /*.pyc lv2fil-2.0+20100312.git18130f5a+dfsg0/lv2_ui.h0000644000175000017500000004334111346337452017455 0ustar alessioalessio/************************************************************************ * * In-process UI extension for LV2 * * Copyright (C) 2006-2008 Lars Luthman * * Based on lv2.h, which was * * Copyright (C) 2000-2002 Richard W.E. Furse, Paul Barton-Davis, * Stefan Westerfeld * Copyright (C) 2006 Steve Harris, Dave Robillard. * * This header is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published * by the Free Software Foundation; either version 2.1 of the License, * or (at your option) any later version. * * This header is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
* * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 * USA. * ***********************************************************************/ /** @file This extension defines an interface that can be used in LV2 plugins and hosts to create UIs for plugins. The UIs are plugins that reside in shared object files in an LV2 bundle and are referenced in the RDF data using the triples (Turtle shown)
    
    @@prefix uiext:  .
        uiext:ui      .
        a            uiext:GtkUI .
      uiext:binary  .
where is the URI of the plugin, is the URI of the plugin UI and is the relative URI to the shared object file. While it is possible to have the plugin UI and the plugin in the same shared object file it is probably a good idea to keep them separate so that hosts that don't want UIs don't have to load the UI code. A UI MUST specify its class in the RDF data, in this case uiext:GtkUI. The class defines what type the UI is, e.g. what graphics toolkit it uses. There are no UI classes defined in this extension, those are specified separately (and anyone can define their own). (Note: the prefix above is used throughout this file for the same URI) It's entirely possible to have multiple UIs for the same plugin, or to have the UI for a plugin in a different bundle from the actual plugin - this way people other than the plugin author can write plugin UIs independently without editing the original plugin bundle. Note that the process that loads the shared object file containing the UI code and the process that loads the shared object file containing the actual plugin implementation does not have to be the same. There are many valid reasons for having the plugin and the UI in different processes, or even on different machines. This means that you can _not_ use singletons and global variables and expect them to refer to the same objects in the UI and the actual plugin. The function callback interface defined in this header is all you can expect to work. Since the LV2 specification itself allows for extensions that may add new types of data and configuration parameters that plugin authors may want to control with a UI, this extension allows for meta-extensions that can extend the interface between the UI and the host. These extensions mirror the extensions used for plugins - there are required and optional "features" that you declare in the RDF data for the UI as
    
     uiext:requiredFeature  .
     uiext:optionalFeature  .
These predicates have the same semantics as lv2:requiredFeature and lv2:optionalFeature - if a UI is declaring a feature as required, the host is NOT allowed to load it unless it supports that feature, and if it does support a feature (required or optional) it MUST pass that feature's URI and any additional data (specified by the meta-extension that defines the feature) in a LV2_Feature struct (as defined in lv2.h) to the UI's instantiate() function. These features may be used to specify how to pass data between the UI and the plugin port buffers - see LV2UI_Write_Function for details. There are four features defined in this extension that hosts may want to implement:
    uiext:makeResident
If this feature is required by a UI the host MUST NEVER unload the shared library containing the UI implementation during the lifetime of the host process (e.g. never calling dlclose() on Linux). This feature may be needed by e.g. a Gtk UI that registers its own Glib types using g_type_register_static() - if it gets unloaded and then loaded again the type registration will break, since there is no way to unregister the types when the library is unloaded. The data pointer in the LV2_Feature for this feature should always be set to NULL.
    uiext:makeSONameResident
This feature is ELF specific - it should only be used by UIs that use the ELF file format for the UI shared object files (e.g. on Linux). If it is required by an UI the UI should also list a number of SO names (shared object names) for libraries that the UI shared object depends on and that may not be unloaded during the lifetime of the host process, using the predicate @c uiext:residentSONames, like this:
     uiext:residentSONames "libgtkmm-2.4.so.1", "libfoo.so.0"
The host MUST then make sure that the shared libraries with the given ELF SO names are not unloaded when the plugin UI is, but stay loaded during the entire lifetime of the host process. On Linux this can be accomplished by calling dlopen() on the shared library file with that SO name and never calling a matching dlclose(). However, if a plugin UI requires the @c uiext:makeSONameResident feature, it MUST ALWAYS be safe for the host to just never unload the shared object containing the UI implementation, i.e. act as if the UI required the @c uiext:makeResident feature instead. Thus the host only needs to find the shared library files corresponding to the given SO names if it wants to save RAM by unloading the UI shared object file when it is no longer needed. The data pointer for the LV2_Feature for this feature should always be set to NULL.
    uiext:noUserResize
If an UI requires this feature it indicates that it does not make sense to let the user resize the main widget, and the host should prevent that. This feature may not make sense for all UI types. The data pointer for the LV2_Feature for this feature should always be set to NULL.
    uiext:fixedSize
If an UI requires this feature it indicates the same thing as uiext:noUserResize, and additionally it means that the UI will not resize the main widget on its own - it will always remain the same size (e.g. a pixmap based GUI). This feature may not make sense for all UI types. The data pointer for the LV2_Feature for this feature should always be set to NULL. UIs written to this specification do not need to be threadsafe - the functions defined below may only be called in the same thread as the UI main loop is running in. Note that this UI extension is NOT a lv2:Feature. There is no way for a plugin to know whether the host that loads it supports UIs or not, and the plugin must ALWAYS work without the UI (although it may be rather useless unless it has been configured using the UI in a previous session). A UI does not have to be a graphical widget, it could just as well be a server listening for OSC input or an interface to some sort of hardware device, depending on the RDF class of the UI. */ #ifndef LV2_UI_H #define LV2_UI_H #include "lv2.h" #define LV2_UI_URI "http://lv2plug.in/ns/extensions/ui" #ifdef __cplusplus extern "C" { #endif /** A pointer to some widget or other type of UI handle. The actual type is defined by the type URI of the UI. All the functionality provided by this extension is toolkit independent, the host only needs to pass the necessary callbacks and display the widget, if possible. Plugins may have several UIs, in various toolkits. */ typedef void* LV2UI_Widget; /** This handle indicates a particular instance of a UI. It is valid to compare this to NULL (0 for C++) but otherwise the host MUST not attempt to interpret it. The UI plugin may use it to reference internal instance data. */ typedef void* LV2UI_Handle; /** This handle indicates a particular plugin instance, provided by the host. It is valid to compare this to NULL (0 for C++) but otherwise the UI plugin MUST not attempt to interpret it. 
The host may use it to reference internal plugin instance data. */ typedef void* LV2UI_Controller; /** This is the type of the host-provided function that the UI can use to send data to a plugin's input ports. The @c buffer parameter must point to a block of data, @c buffer_size bytes large. The contents of this buffer and what the host should do with it depends on the value of the @c format parameter. The @c format parameter should either be 0 or a numeric ID for a "Transfer mechanism". Transfer mechanisms are Features and may be defined in meta-extensions. They specify how to translate the data buffers passed to this function to input data for the plugin ports. If a UI wishes to write data to an input port, it must list a transfer mechanism Feature for that port's class as an optional or required feature (depending on whether the UI will work without being able to write to that port or not). The only exception is when the UI wants to write single float values to input ports of the class lv2:ControlPort, in which case @c buffer_size should always be 4, the buffer should always contain a single IEEE-754 float, and @c format should be 0. The numeric IDs for the transfer mechanisms are provided by a URI-to-integer mapping function provided by the host, using the URI Map feature with the map URI "http://lv2plug.in/ns/extensions/ui". Thus a UI that requires transfer mechanism features also requires the URI Map feature, but this is implicit - the UI does not have to list the URI map feature as a required or optional feature in it's RDF data. An UI MUST NOT pass a @c format parameter value (except 0) that has not been returned by the host-provided URI mapping function for a host-supported transfer mechanism feature URI. The UI MUST NOT try to write to a port for which there is no specified transfer mechanism, or to an output port. The UI is responsible for allocating the buffer and deallocating it after the call. 
*/ typedef void (*LV2UI_Write_Function)(LV2UI_Controller controller, uint32_t port_index, uint32_t buffer_size, uint32_t format, const void* buffer); /** This struct contains the implementation of an UI. A pointer to an object of this type is returned by the lv2ui_descriptor() function. */ typedef struct _LV2UI_Descriptor { /** The URI for this UI (not for the plugin it controls). */ const char* URI; /** Create a new UI object and return a handle to it. This function works similarly to the instantiate() member in LV2_Descriptor. @param descriptor The descriptor for the UI that you want to instantiate. @param plugin_uri The URI of the plugin that this UI will control. @param bundle_path The path to the bundle containing the RDF data file that references this shared object file, including the trailing '/'. @param write_function A function provided by the host that the UI can use to send data to the plugin's input ports. @param controller A handle for the plugin instance that should be passed as the first parameter of @c write_function. @param widget A pointer to an LV2UI_Widget. The UI will write a widget pointer to this location (what type of widget depends on the RDF class of the UI) that will be the main UI widget. @param features An array of LV2_Feature pointers. The host must pass all feature URIs that it and the UI supports and any additional data, just like in the LV2 plugin instantiate() function. Note that UI features and plugin features are NOT necessarily the same, they just share the same data structure - this will probably not be the same array as the one the plugin host passes to a plugin. */ LV2UI_Handle (*instantiate)(const struct _LV2UI_Descriptor* descriptor, const char* plugin_uri, const char* bundle_path, LV2UI_Write_Function write_function, LV2UI_Controller controller, LV2UI_Widget* widget, const LV2_Feature* const* features); /** Destroy the UI object and the associated widget. 
The host must not try to access the widget after calling this function. */ void (*cleanup)(LV2UI_Handle ui); /** Tell the UI that something interesting has happened at a plugin port. What is interesting and how it is written to the buffer passed to this function is defined by the @c format parameter, which has the same meaning as in LV2UI_Write_Function. The only exception is ports of the class lv2:ControlPort, for which this function should be called when the port value changes (it does not have to be called for every single change if the host's UI thread has problems keeping up with the thread the plugin is running in), @c buffer_size should be 4 and the buffer should contain a single IEEE-754 float. In this case the @c format parameter should be 0. By default, the host should only call this function for input ports of the lv2:ControlPort class. However, the default setting can be modified by using the following URIs in the UI's RDF data:
      uiext:portNotification
      uiext:noPortNotification
      uiext:plugin
      uiext:portIndex
      
For example, if you want the UI with uri for the plugin with URI to get notified when the value of the output control port with index 4 changes, you would use the following in the RDF for your UI:
       uiext:portNotification [ uiext:plugin  ;
                                                      uiext:portIndex 4 ] .
      
and similarly with uiext:noPortNotification if you wanted to prevent notifications for a port for which it would be on by default otherwise. The UI is not allowed to request notifications for ports of types for which no transfer mechanism is specified, if it does it should be considered broken and the host should not load it. The @c buffer is only valid during the time of this function call, so if the UI wants to keep it for later use it has to copy the contents to an internal buffer. This member may be set to NULL if the UI is not interested in any port events. */ void (*port_event)(LV2UI_Handle ui, uint32_t port_index, uint32_t buffer_size, uint32_t format, const void* buffer); /** Returns a data structure associated with an extension URI, for example a struct containing additional function pointers. Avoid returning function pointers directly since standard C++ has no valid way of casting a void* to a function pointer. This member may be set to NULL if the UI is not interested in supporting any extensions. This is similar to the extension_data() member in LV2_Descriptor. */ const void* (*extension_data)(const char* uri); } LV2UI_Descriptor; /** A plugin UI programmer must include a function called "lv2ui_descriptor" with the following function prototype within the shared object file. This function will have C-style linkage (if you are using C++ this is taken care of by the 'extern "C"' clause at the top of the file). This function will be accessed by the UI host using the @c dlsym() function and called to get a LV2UI_UIDescriptor for the wanted plugin. Just like lv2_descriptor(), this function takes an index parameter. The index should only be used for enumeration and not as any sort of ID number - the host should just iterate from 0 and upwards until the function returns NULL or a descriptor with an URI matching the one the host is looking for. */ const LV2UI_Descriptor* lv2ui_descriptor(uint32_t index); /** This is the type of the lv2ui_descriptor() function. 
*/ typedef const LV2UI_Descriptor* (*LV2UI_DescriptorFunction)(uint32_t index); #ifdef __cplusplus } #endif #endif lv2fil-2.0+20100312.git18130f5a+dfsg0/filter.ttl0000644000175000017500000002620511346337452020116 0ustar alessioalessio@prefix lv2: . @prefix rdfs: . @prefix doap: . @prefix llext: . @prefix foaf: . @prefix ue: . @prefix epp: . @prefix uiext: . a uiext:external; uiext:binary . a lv2:Plugin; a lv2:FilterPlugin; doap:maintainer [ foaf:name "Nedko Arnaudov"; foaf:homepage ; foaf:mbox ; ]; doap:name "4-band parametric filter (Mono)"; doap:license ; uiext:ui ; lv2:port [ a lv2:InputPort, lv2:AudioPort; lv2:datatype lv2:float; lv2:index 0; lv2:symbol "in"; lv2:name "Audio input"; ], [ a lv2:OutputPort, lv2:AudioPort; lv2:datatype lv2:float; lv2:index 1; lv2:symbol "out"; lv2:name "Audio output"; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 2; lv2:symbol "active"; lv2:name "Active"; lv2:default 0.0; lv2:minimum 0.0; lv2:maximum 1.0; lv2:portProperty lv2:toggled; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 3; lv2:symbol "gain"; lv2:name "Gain"; lv2:default 0.0; lv2:minimum -20.0; lv2:maximum 20.0; lv2:portProperty epp:hasStrictBounds; ue:unit ue:db; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 4; lv2:symbol "active1"; lv2:name "Active1"; lv2:default 0.0; lv2:minimum 0.0; lv2:maximum 1.0; lv2:portProperty lv2:toggled; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 5; lv2:symbol "freq1"; lv2:name "Frequency 1"; lv2:default 200.0; lv2:minimum 20.0; lv2:maximum 2000.0; lv2:portProperty epp:hasStrictBounds; lv2:portProperty epp:logarithmic; ue:unit ue:hz; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 6; lv2:symbol "bandwidth1"; lv2:name "Bandwidth 1"; lv2:default 1.0; lv2:minimum 0.125; lv2:maximum 8.0; lv2:portProperty epp:hasStrictBounds; lv2:portProperty epp:logarithmic; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 7; lv2:symbol "gain1"; lv2:name "Gain 1"; lv2:default 0.0; lv2:minimum -20.0; lv2:maximum 20.0; lv2:portProperty 
epp:hasStrictBounds; ue:unit ue:db; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 8; lv2:symbol "active2"; lv2:name "Active 2"; lv2:default 0.0; lv2:minimum 0.0; lv2:maximum 1.0; lv2:portProperty lv2:toggled; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 9; lv2:symbol "frequency2"; lv2:name "Frequency 2"; lv2:default 400; lv2:minimum 40; lv2:maximum 4000; lv2:portProperty epp:hasStrictBounds; lv2:portProperty epp:logarithmic; ue:unit ue:hz; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 10; lv2:symbol "bandwidth2"; lv2:name "Bandwidth 2"; lv2:default 1.0; lv2:minimum 0.125; lv2:maximum 8.0; lv2:portProperty epp:hasStrictBounds; lv2:portProperty epp:logarithmic; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 11; lv2:symbol "gain2"; lv2:name "Gain 2"; lv2:default 0.0; lv2:minimum -20.0; lv2:maximum 20.0; lv2:portProperty epp:hasStrictBounds; ue:unit ue:db; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 12; lv2:symbol "active3"; lv2:name "Active 3"; lv2:default 0.0; lv2:minimum 0.0; lv2:maximum 1.0; lv2:portProperty lv2:toggled; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 13; lv2:symbol "frequency3"; lv2:name "Frequency 3"; lv2:default 1000; lv2:minimum 100; lv2:maximum 10000; lv2:portProperty epp:hasStrictBounds; lv2:portProperty epp:logarithmic; ue:unit ue:hz; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 14; lv2:symbol "bandwidth3"; lv2:name "Bandwidth 3"; lv2:default 1.0; lv2:minimum 0.125; lv2:maximum 8.0; lv2:portProperty epp:hasStrictBounds; lv2:portProperty epp:logarithmic; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 15; lv2:symbol "gain3"; lv2:name "Gain 3"; lv2:default 0.0; lv2:minimum -20.0; lv2:maximum 20.0; lv2:portProperty epp:hasStrictBounds; ue:unit ue:db; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 16; lv2:symbol "active4"; lv2:name "Active 4"; lv2:default 0.0; lv2:minimum 0.0; lv2:maximum 1.0; lv2:portProperty lv2:toggled; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 17; lv2:symbol "frequency4"; 
lv2:name "Frequency 4"; lv2:default 2000; lv2:minimum 200; lv2:maximum 20000; lv2:portProperty epp:hasStrictBounds; lv2:portProperty epp:logarithmic; ue:unit ue:hz; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 18; lv2:symbol "bandwidth4"; lv2:name "Bandwidth 4"; lv2:default 1.0; lv2:minimum 0.125; lv2:maximum 8.0; lv2:portProperty epp:hasStrictBounds; lv2:portProperty epp:logarithmic; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 19; lv2:symbol "gain4"; lv2:name "Gain 4"; lv2:default 0.0; lv2:minimum -20.0; lv2:maximum 20.0; lv2:portProperty epp:hasStrictBounds; ue:unit ue:db; ]. a lv2:Plugin; a lv2:FilterPlugin; doap:maintainer [ foaf:name "Nedko Arnaudov"; foaf:homepage ; foaf:mbox ; ]; doap:name "4-band parametric filter (Stereo)"; doap:license ; uiext:ui ; lv2:port [ a lv2:InputPort, lv2:AudioPort; lv2:datatype lv2:float; lv2:index 0; lv2:symbol "in_left"; lv2:name "left audio input"; ], [ a lv2:InputPort, lv2:AudioPort; lv2:datatype lv2:float; lv2:index 1; lv2:symbol "in_right"; lv2:name "Right audio input"; ], [ a lv2:OutputPort, lv2:AudioPort; lv2:datatype lv2:float; lv2:index 2; lv2:symbol "out_left"; lv2:name "Left audio output"; ], [ a lv2:OutputPort, lv2:AudioPort; lv2:datatype lv2:float; lv2:index 3; lv2:symbol "out_right"; lv2:name "Right audio output"; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 4; lv2:symbol "active"; lv2:name "Active"; lv2:default 0.0; lv2:minimum 0.0; lv2:maximum 1.0; lv2:portProperty lv2:toggled; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 5; lv2:symbol "gain"; lv2:name "Gain"; lv2:default 0.0; lv2:minimum -20.0; lv2:maximum 20.0; lv2:portProperty epp:hasStrictBounds; ue:unit ue:db; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 6; lv2:symbol "active1"; lv2:name "Active1"; lv2:default 0.0; lv2:minimum 0.0; lv2:maximum 1.0; lv2:portProperty lv2:toggled; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 7; lv2:symbol "freq1"; lv2:name "Frequency 1"; lv2:default 200.0; lv2:minimum 20.0; lv2:maximum 
2000.0; lv2:portProperty epp:hasStrictBounds; lv2:portProperty epp:logarithmic; ue:unit ue:hz; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 8; lv2:symbol "bandwidth1"; lv2:name "Bandwidth 1"; lv2:default 1.0; lv2:minimum 0.125; lv2:maximum 8.0; lv2:portProperty epp:hasStrictBounds; lv2:portProperty epp:logarithmic; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 9; lv2:symbol "gain1"; lv2:name "Gain 1"; lv2:default 0.0; lv2:minimum -20.0; lv2:maximum 20.0; lv2:portProperty epp:hasStrictBounds; ue:unit ue:db; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 10; lv2:symbol "active2"; lv2:name "Active 2"; lv2:default 0.0; lv2:minimum 0.0; lv2:maximum 1.0; lv2:portProperty lv2:toggled; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 11; lv2:symbol "frequency2"; lv2:name "Frequency 2"; lv2:default 400; lv2:minimum 40; lv2:maximum 4000; lv2:portProperty epp:hasStrictBounds; lv2:portProperty epp:logarithmic; ue:unit ue:hz; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 12; lv2:symbol "bandwidth2"; lv2:name "Bandwidth 2"; lv2:default 1.0; lv2:minimum 0.125; lv2:maximum 8.0; lv2:portProperty epp:hasStrictBounds; lv2:portProperty epp:logarithmic; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 13; lv2:symbol "gain2"; lv2:name "Gain 2"; lv2:default 0.0; lv2:minimum -20.0; lv2:maximum 20.0; lv2:portProperty epp:hasStrictBounds; ue:unit ue:db; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 14; lv2:symbol "active3"; lv2:name "Active 3"; lv2:default 0.0; lv2:minimum 0.0; lv2:maximum 1.0; lv2:portProperty lv2:toggled; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 15; lv2:symbol "frequency3"; lv2:name "Frequency 3"; lv2:default 1000; lv2:minimum 100; lv2:maximum 10000; lv2:portProperty epp:hasStrictBounds; lv2:portProperty epp:logarithmic; ue:unit ue:hz; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 16; lv2:symbol "bandwidth3"; lv2:name "Bandwidth 3"; lv2:default 1.0; lv2:minimum 0.125; lv2:maximum 8.0; lv2:portProperty epp:hasStrictBounds; 
lv2:portProperty epp:logarithmic; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 17; lv2:symbol "gain3"; lv2:name "Gain 3"; lv2:default 0.0; lv2:minimum -20.0; lv2:maximum 20.0; lv2:portProperty epp:hasStrictBounds; ue:unit ue:db; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 18; lv2:symbol "active4"; lv2:name "Active 4"; lv2:default 0.0; lv2:minimum 0.0; lv2:maximum 1.0; lv2:portProperty lv2:toggled; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 19; lv2:symbol "frequency4"; lv2:name "Frequency 4"; lv2:default 2000; lv2:minimum 200; lv2:maximum 20000; lv2:portProperty epp:hasStrictBounds; lv2:portProperty epp:logarithmic; ue:unit ue:hz; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 20; lv2:symbol "bandwidth4"; lv2:name "Bandwidth 4"; lv2:default 1.0; lv2:minimum 0.125; lv2:maximum 8.0; lv2:portProperty epp:hasStrictBounds; lv2:portProperty epp:logarithmic; ], [ a lv2:InputPort; a lv2:ControlPort; lv2:index 21; lv2:symbol "gain4"; lv2:name "Gain 4"; lv2:default 0.0; lv2:minimum -20.0; lv2:maximum 20.0; lv2:portProperty epp:hasStrictBounds; ue:unit ue:db; ]. lv2fil-2.0+20100312.git18130f5a+dfsg0/lv2plugin.c0000644000175000017500000000433411346337452020171 0ustar alessioalessio/* -*- Mode: C ; c-basic-offset: 2 -*- */ /***************************************************************************** * * Copyright (C) 2006,2007,2008,2009 Nedko Arnaudov * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; version 2 of the License * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. 
* * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * *****************************************************************************/ #include #include #include "lv2filter.h" //#define LOG_LEVEL LOG_LEVEL_DEBUG #include "log.h" static LV2_Descriptor g_lv2_plugins[] = { { .URI = LV2FILTER_MONO_URI, .instantiate = lv2filter_instantiate, .connect_port = lv2filter_connect_port, .run = lv2filter_run, .cleanup = lv2filter_cleanup, .extension_data = lv2filter_extension_data }, { .URI = LV2FILTER_STEREO_URI, .instantiate = lv2filter_instantiate, .connect_port = lv2filter_connect_port, .run = lv2filter_run, .cleanup = lv2filter_cleanup, .extension_data = lv2filter_extension_data }, { .URI = NULL } }; static int g_lv2_plugins_count; void lv2_initialise() __attribute__((constructor)); void lv2_initialise() { const LV2_Descriptor * descr_ptr; LOG_DEBUG("lv2_initialise() called."); descr_ptr = g_lv2_plugins; while (descr_ptr->URI != NULL) { g_lv2_plugins_count++; descr_ptr++; } } const LV2_Descriptor* lv2_descriptor(uint32_t index) { LOG_DEBUG("lv2_descriptor(%u) called.", (unsigned int)index); if (index >= g_lv2_plugins_count) { LOG_DEBUG("plugin at index %u not found.", (unsigned int)index); return NULL; } LOG_DEBUG("<%s> found.", g_lv2_plugins[index].URI); return g_lv2_plugins + index; } lv2fil-2.0+20100312.git18130f5a+dfsg0/lv2logo.png0000644000175000017500000001312511346337452020173 0ustar alessioalessioPNG  IHDRd?m`sBIT|dtEXtSoftwarewww.inkscape.org<IDATx]{p]ysޖeF`ɴqeJ0%C$Єt0 IcH0Ђ-K,Ll 2eYɶd[~ݳ+ɱ)ZY,)\r+VOF]98{q1Gy"SIyOEnY*=qG9vQ3R4+ e裏|kSfe|+#3Ӳm,(C]*^\;uW^OuL͗/lL!1%0sΝ /<wQWA!r6y툭x*x=Y ̥$aR@BƦ(.L> ~ڶ ˲PZ6מSZ[n_{eYFDhbYV Nìٳ-7*f$s !6K6MkR Ev},T C0",8g サ?3[ 5HEEEu)`X(  > 85(%#Wg`#h4tzm۲?쳏/A`Yׯܑ;+bkަ `PM+1Cm\>9Ȕ9T7IOe$OzZy˲vѧ,+o&R_e5yK\J@wvtt4ZUED< c\ 0O "E 3*TO2=\ c]QWhSpkD42ע\f~s=4֜Tegx㍋hzz9%6s%} 
pCcCQL0ՈqGMM!si@+b"l\eYK.]MK,p茌 x/!QMym}@^90 WLw~$p ORpMr., s$qxzBzy0-5Y/ÍbͅAx aԠAT ;==,WF̙NJ+^g3Q'NK##:}F n(7OQ2蘡eFi Rt"U xaw`L7-[D"a  <]}~AIQPP3;9w<~d0?8A8appEضFx  .=\KG \zF iii xP\Tmjahooǁ>ą !R3"DžEQ<WBOԇcFhlO/Ι޼2:cɻ0!MCruQLLYPh$jTWWe~/Bpow#DMgnLd¸Q/+9Upp7BD6pS]I R1cf2~O>!IIsߵlv.h)[nFyr/+E$sHR>#B<?\aң)=GǑ9)͊y8sV[/J+(eWu¼ys CP]0c5󑕝4Z;\Ѕl"}e 8.] `F$5UKET,6L?]}U9ZMIG\)dd+AjaYV&o8hmkC3HuUU`Xz SKe qB|bBET׻ z9OªM4SݟIi/Y߫IeQiI8 rͅ,KBݻa"'3wߵ_>-8p!Yp2W$~%IɴPnŦ.ɽ MTATnGUd_6 3^! θL0f=}8v~e܏8wO(eQhOLY@KGA,6Addfb*ٻ`C#BNv62AS=Kcǰv|M BsskPru>'!ҢQTW+)"@`It12<'OhVqC `dt ]ݧq4Nuw!60(ck֬w2.b=!bFV]#i^G8zjkk5Jښ444*FS됖Wݽzk.LL8 01:6p|hpjr$!nXvs}dRԫ RLH #t@qa Uۂ̝;'?3;O[lY.X]Vi܍q cddsу6gvuCSߢx@kbʦ#%aR>ըC@ɓxAlX% Qnx\xLs…-ik.8,Q addc.G#" d6;ލXlOq: EAxĔ=>[`a;n DRb DOQs"O̙sX˸{te l~UNMoSlq<>0\ZohJJl;k'3+UUU:!nf՟ `q8;w5\/ EX_f-^ױu!ل/>Q0T{(͸dHŔzMOѣ^XØOв,X,XͯE4A$g& T_%3 lys TT7==O'O~#Gkk++ ž/ wc7{uQ~r$P Bjs< Y rg䓰^N?p۪#l-ohrEQ>zuW/*!QqǏc` \wu bS7=QRPԵa4Sp4l7Me.\~ B~24y¬Aım[6{= Ic{+q+?.~{ڞTS"OFzc642K nxOP{\hbʷ=&K#Ӣظ?"DVv6n[ 6ީ˖ij0::" ;:؆||շ2n"}=\oTz|lM;==gvMC&K#>9eT&ڶ-ohq^>*&~3H>v gY~ i?P^~5GIQ>#Gqj.ҩ9 ޻{ ?{wG$uxN#,m.LM|5=$&I#jzJQuͮ&ǓYןk7{߽W+2=)j(vz9><2=!y۔Ԁ7(2nxc2Ɂ N`Z.ǦJLWd`+Oɳp?uA+./L9.Px;̳M+Wߎ4k8qNZj{cر~z:_R6vИ69qGC}R6o59ŅsX^ /*,ܳXwx'?> :^ez~&_F7,_¦mYQ4m}xW$ ι~ ՈD#a߾=qBĉ1 ?e/PHTRGFO)+<@A]ؘg4a9?r^ UR_0HbIM~fVΔ J5\(P"4Uuz H*@/AcDwMDGLJ>'󖑡Ĕ+.}Q;G~HyMg1czm"j;?uwMs~w\Ob|lKչ^18K|Qs ~:h$ )۶WR~M$;w&=2!wɫ-" -zض_dK+.gW\qh@#A1%5$\d]>7k>:tQ * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; version 2 of the License * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. 
* * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * *****************************************************************************/ #ifndef LOG_H__7097F6FE_4FEE_4962_9542_60375961F567__INCLUDED #define LOG_H__7097F6FE_4FEE_4962_9542_60375961F567__INCLUDED void lv2log(int level, const char * format, ...); #define LOG_LEVEL_DEBUG 0 #define LOG_LEVEL_INFO 1 #define LOG_LEVEL_WARNING 2 #define LOG_LEVEL_NOTICE 3 #define LOG_LEVEL_ERROR 4 #define LOG_LEVEL_FATAL 5 #define LOG_LEVEL_BLACK_HOLE 6 #if !defined(LOG_LEVEL) #define LOG_LEVEL LOG_LEVEL_WARNING #endif #if LOG_LEVEL <= LOG_LEVEL_DEBUG # define LOG_DEBUG(format, arg...) \ lv2log(LOG_LEVEL_DEBUG, \ format "\n", ## arg) #else # define LOG_DEBUG(format, arg...) #endif #if LOG_LEVEL <= LOG_LEVEL_INFO # define LOG_INFO(format, arg...) \ lv2log(LOG_LEVEL_INFO, \ format "\n", ## arg) #else # define LOG_INFO(format, arg...) #endif #if LOG_LEVEL <= LOG_LEVEL_WARNING # define LOG_WARNING(format, arg...) \ lv2log(LOG_LEVEL_WARNING, \ format "\n", ## arg) #else # define LOG_WARNING(format, arg...) #endif #if LOG_LEVEL <= LOG_LEVEL_NOTICE # define LOG_NOTICE(format, arg...) \ lv2log(LOG_LEVEL_NOTICE, \ format "\n", ## arg) #else # define LOG_NOTICE(format, arg...) #endif #if LOG_LEVEL <= LOG_LEVEL_ERROR # define LOG_ERROR(format, arg...) \ lv2log(LOG_LEVEL_ERROR, \ format "\n", ## arg) #else # define LOG_ERROR(format, arg...) #endif #if LOG_LEVEL <= LOG_LEVEL_FATAL # define LOG_FATAL(format, arg...) \ lv2log(LOG_LEVEL_FATAL, \ format "\n", ## arg) #else # define LOG_FATAL(format, arg...) #endif #endif /* #ifndef LOG_H__7097F6FE_4FEE_4962_9542_60375961F567__INCLUDED */ lv2fil-2.0+20100312.git18130f5a+dfsg0/lv2plugin.py0000644000175000017500000001033111346337452020371 0ustar alessioalessio#! 
/usr/bin/env python # encoding: utf-8 # # Copyright (C) 2008,2009 Nedko Arnaudov # # waf tool for lv2 plugins import Object from Object import taskgen, after, before, feature from Common import install_files import os import Params import shutil from Configure import g_maxlen #g_maxlen = 40 def display_msg(msg, status = None, color = None): sr = msg global g_maxlen g_maxlen = max(g_maxlen, len(msg)) if status: print "%s :" % msg.ljust(g_maxlen), Params.pprint(color, status) else: print "%s" % msg.ljust(g_maxlen) def get_lv2_install_dir(): envvar = 'LV2_PATH' has_lv2_path = os.environ.has_key(envvar) if has_lv2_path: display_msg("Checking LV2_PATH") else: display_msg("Checking LV2_PATH", "not set", 'YELLOW') return None if has_lv2_path: lv2paths = os.environ[envvar].split(':') for lv2path in lv2paths: if not os.path.isdir(lv2path): display_msg(' ' + lv2path, 'not directory!', 'YELLOW') continue if not os.access(lv2path, os.W_OK): display_msg(' ' + lv2path, 'not writable', 'YELLOW') continue display_msg(' ' + lv2path, 'looks good', 'GREEN') return lv2path return None class lv2plugin_proxy_abstract(Object.task_gen): def __init__(self, tool, hook): Object.task_gen.__init__(self) self.tool = tool self.hook = hook def the_hook(self, obj, node): #print "-------------- '%s'" % node #print "tool '%s'" % self.tool #print "tool.target '%s'" % self.tool.target #print "hook '%s'" % self.hook #print "obj '%s'" % obj #print "self '%s'" % self self.hook(self.tool, node) class lv2plugin_taskgen(Object.task_gen): def __init__(self, type = 'cpp', env=None): Object.task_gen.__init__(self) self.type = type self.tool = Object.task_gen.classes[type]('shlib') if type == 'cpp': self.tool.m_type_initials = 'cpp' self.tool.features.append('cc') self.tool.ccflags = '' self.tool.mappings['.c'] = Object.task_gen.mappings['.cc'] def apply_core(self): #print "lv2plugin.apply_core() called." 
#print "sources: " + repr(self.source) #print "target: '%s'" % self.target #print "ttl: '%s'" % self.ttl self.tool.target = self.target self.tool.env['shlib_PATTERN'] = '%s.so' self.tool.uselib = self.uselib self.tool.ttl = self.ttl self.tool.lv2 = True Object.task_gen.apply_core(self) def get_hook(self, ext): classes = Object.task_gen.classes for cls in classes.keys(): if cls == 'lv2plugin': continue if cls != self.type: continue map = classes[cls].mappings for x in map: if x == ext: hook = map[x] obj = lv2plugin_proxy_abstract(self.tool, hook) return obj.the_hook return None def set_options(opt): opt.add_option('--lv2-dir', type='string', default='', dest='LV2_INSTALL_DIR', help='Force directory where LV2 plugin(s) will be installed.') def detect(conf): conf.env['LV2_INSTALL_DIR'] = getattr(Params.g_options, 'LV2_INSTALL_DIR') status = conf.env['LV2_INSTALL_DIR'] if not status: status = 'will be deduced from LV2_PATH' display_msg('LV2 installation directory', status, 'GREEN') @taskgen @feature('normal') @after('apply_objdeps') @before('install_target') def install_lv2(self): if not getattr(self, 'lv2', None): return self.meths.remove('install_target') if not Params.g_install: return if not self.env['LV2_INSTALL_DIR']: self.env['LV2_INSTALL_DIR'] = get_lv2_install_dir() if not self.env['LV2_INSTALL_DIR']: Params.fatal('Cannot locate LV2 plugins directory') display_msg('LV2 installation directory', self.env['LV2_INSTALL_DIR'], 'GREEN') bundle_files = self.ttl bundle_files.append(self.target + '.so') install_files('LV2_INSTALL_DIR', self.target + '.lv2', bundle_files, self.env) lv2fil-2.0+20100312.git18130f5a+dfsg0/lv2_ui.c0000644000175000017500000003224111346337452017445 0ustar alessioalessio/* -*- Mode: C ; c-basic-offset: 2 -*- */ /***************************************************************************** * * Copyright (C) 2009 Nedko Arnaudov * * LV2 UI bundle shared library for communicating with a DSSI UI * * This program is free software; you can 
redistribute it and/or * modify it under the terms of the GNU General Public License as * published by the Free Software Foundation; either version 2 of * the License, or (at your option) any later version. * * This program is distributed in the hope that it will be * useful, but WITHOUT ANY WARRANTY; without even the implied * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR * PURPOSE. See the GNU General Public License for more details. * * You should have received a copy of the GNU General Public * License along with this program; if not, write to the Free * Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, * MA 02111-1307, USA. * *****************************************************************************/ #define UI_EXECUTABLE "ui" #define UI_URI "http://nedko.aranaudov.org/soft/filter/2/gui" #define WAIT_START_TIMEOUT 3000 /* ms */ #define WAIT_ZOMBIE_TIMEOUT 3000 /* ms */ #define WAIT_STEP 100 /* ms */ //#define FORK_TIME_MEASURE #define USE_VFORK //#define USE_CLONE //#define USE_CLONE2 #if defined(USE_VFORK) #define FORK vfork #define FORK_STR "vfork" #elif defined(USE_CLONE) #define FORK_STR "clone" #elif defined(USE_CLONE2) #define FORK_STR "clone2" #else #define FORK fork #define FORK_STR "fork" #endif #include #include #include #include #include #include #if defined(FORK_TIME_MEASURE) # include #endif #include #if defined(USE_CLONE) || defined(USE_CLONE2) # include #endif #include #include #include #include #include "lv2_ui.h" #include "lv2_external_ui.h" struct control { struct lv2_external_ui virt; /* WARNING: code assumes this is the first struct member */ LV2UI_Controller controller; LV2UI_Write_Function write_function; void (* ui_closed)(LV2UI_Controller controller); bool running; /* true if UI launched and 'exiting' not received */ bool visible; /* true if 'show' sent */ int send_pipe; /* the pipe end that is used for sending messages to UI */ int recv_pipe; /* the pipe end that is used for receiving messages from UI */ pid_t 
pid; }; static char * read_line( struct control * control_ptr) { ssize_t ret; char ch; char buf[100]; char * ptr; ptr = buf; loop: ret = read(control_ptr->recv_pipe, &ch, 1); if (ret == 1 && ch != '\n') { *ptr++ = ch; goto loop; } if (ptr != buf) { *ptr = 0; //printf("recv: \"%s\"\n", buf); return strdup(buf); } return NULL; } static bool wait_child( pid_t pid) { pid_t ret; int i; if (pid == -1) { fprintf(stderr, "Can't wait for pid -1\n"); return false; } for (i = 0; i < WAIT_ZOMBIE_TIMEOUT / WAIT_STEP; i++) { //printf("waitpid(%d): %d\n", (int)pid, i); ret = waitpid(pid, NULL, WNOHANG); if (ret != 0) { if (ret == pid) { //printf("child zombie with pid %d was consumed.\n", (int)pid); return true; } if (ret == -1) { fprintf(stderr, "waitpid(%d) failed: %s\n", (int)pid, strerror(errno)); return false; } fprintf(stderr, "we have waited for child pid %d to exit but we got pid %d instead\n", (int)pid, (int)ret); return false; } //printf("zombie wait %d ms ...\n", WAIT_STEP); usleep(WAIT_STEP * 1000); /* wait 100 ms */ } fprintf( stderr, "we have waited for child with pid %d to exit for %.1f seconds and we are giving up\n", (int)pid, (float)((float)WAIT_START_TIMEOUT / 1000)); return false; } #define control_ptr ((struct control *)_this_) static void run( struct lv2_external_ui * _this_) { char * msg; char * port_index_str; char * port_value_str; int port; float value; char * locale; //printf("run() called\n"); msg = read_line(control_ptr); if (msg == NULL) { return; } locale = strdup(setlocale(LC_NUMERIC, NULL)); setlocale(LC_NUMERIC, "POSIX"); if (!strcmp(msg, "port_value")) { port_index_str = read_line(control_ptr); port_value_str = read_line(control_ptr); port = atoi(port_index_str); if (sscanf(port_value_str, "%f", &value) == 1) { //printf("port %d = %f\n", port, value); control_ptr->write_function(control_ptr->controller, (uint32_t)port, sizeof(float), 0, &value); } else { fprintf(stderr, "failed to convert \"%s\" to float\n", port_value_str); } 
free(port_index_str); free(port_value_str); } else if (!strcmp(msg, "exiting")) { //printf("got UI exit notification\n"); /* for a while wait child to exit, we dont like zombie processes */ if (!wait_child(control_ptr->pid)) { fprintf(stderr, "force killing misbehaved child %d (exit)\n", (int)control_ptr->pid); if (kill(control_ptr->pid, SIGKILL) == -1) { fprintf(stderr, "kill() failed: %s (exit)\n", strerror(errno)); } else { wait_child(control_ptr->pid); } } control_ptr->running = false; control_ptr->visible = false; control_ptr->ui_closed(control_ptr->controller); } else { printf("unknown message: \"%s\"\n", msg); } setlocale(LC_NUMERIC, locale); free(locale); free(msg); } static void show( struct lv2_external_ui * _this_) { //printf("show() called\n"); if (control_ptr->visible) { return; } write(control_ptr->send_pipe, "show\n", 5); control_ptr->visible = true; } static void hide( struct lv2_external_ui * _this_) { //printf("hide() called\n"); if (!control_ptr->visible) { return; } write(control_ptr->send_pipe, "hide\n", 5); control_ptr->visible = false; } #undef control_ptr #if defined(FORK_TIME_MEASURE) static uint64_t get_current_time() { struct timeval time; if (gettimeofday(&time, NULL) != 0) return 0; return (uint64_t)time.tv_sec * 1000000 + (uint64_t)time.tv_usec; } #define FORK_TIME_MEASURE_VAR_NAME ____t #define FORK_TIME_MEASURE_VAR uint64_t FORK_TIME_MEASURE_VAR_NAME #define FORK_TIME_MEASURE_BEGIN FORK_TIME_MEASURE_VAR_NAME = get_current_time() #define FORK_TIME_MEASURE_END(msg) \ { \ FORK_TIME_MEASURE_VAR_NAME = get_current_time() - FORK_TIME_MEASURE_VAR_NAME; \ fprintf(stderr, msg ": %llu us\n", (unsigned long long)FORK_TIME_MEASURE_VAR_NAME); \ } #else #define FORK_TIME_MEASURE_VAR #define FORK_TIME_MEASURE_BEGIN #define FORK_TIME_MEASURE_END(msg) #endif #if defined(USE_CLONE) || defined(USE_CLONE2) static int clone_fn(void * context) { execvp(*(const char **)context, (char **)context); return -1; } #endif static LV2UI_Handle instantiate( const 
struct _LV2UI_Descriptor * descriptor, const char * plugin_uri, const char * bundle_path, LV2UI_Write_Function write_function, LV2UI_Controller controller, LV2UI_Widget * widget, const LV2_Feature * const * features) { struct control * control_ptr; struct lv2_external_ui_host * ui_host_ptr; char * filename; int pipe1[2]; /* written by host process, read by plugin UI process */ int pipe2[2]; /* written by plugin UI process, read by host process */ char ui_recv_pipe[100]; char ui_send_pipe[100]; int oldflags; FORK_TIME_MEASURE_VAR; const char * argv[8]; int ret; int i; char ch; //printf("instantiate('%s', '%s') called\n", plugin_uri, bundle_path); ui_host_ptr = NULL; while (*features != NULL) { if (strcmp((*features)->URI, LV2_EXTERNAL_UI_URI) == 0) { ui_host_ptr = (*features)->data; } features++; } if (ui_host_ptr == NULL) { goto fail; } control_ptr = malloc(sizeof(struct control)); if (control_ptr == NULL) { goto fail; } control_ptr->virt.run = run; control_ptr->virt.show = show; control_ptr->virt.hide = hide; control_ptr->controller = controller; control_ptr->write_function = write_function; control_ptr->ui_closed = ui_host_ptr->ui_closed; if (pipe(pipe1) != 0) { fprintf(stderr, "pipe1 creation failed.\n"); } if (pipe(pipe2) != 0) { fprintf(stderr, "pipe2 creation failed.\n"); } snprintf(ui_recv_pipe, sizeof(ui_recv_pipe), "%d", pipe1[0]); /* [0] means reading end */ snprintf(ui_send_pipe, sizeof(ui_send_pipe), "%d", pipe2[1]); /* [1] means writting end */ filename = malloc(strlen(bundle_path) + strlen(UI_EXECUTABLE) + 1); if (filename == NULL) { goto fail_free_control; } strcpy(filename, bundle_path); strcat(filename, UI_EXECUTABLE); control_ptr->running = false; control_ptr->visible = false; control_ptr->pid = -1; argv[0] = "python"; argv[1] = filename; argv[2] = plugin_uri; argv[3] = bundle_path; argv[4] = ui_host_ptr->plugin_human_id != NULL ? 
ui_host_ptr->plugin_human_id : ""; argv[5] = ui_recv_pipe; /* reading end */ argv[6] = ui_send_pipe; /* writting end */ argv[7] = NULL; FORK_TIME_MEASURE_BEGIN; #if defined(USE_CLONE) { int stack[8000]; ret = clone(clone_fn, stack + 4000, CLONE_VFORK, argv); if (ret == -1) { fprintf(stderr, "clone() failed: %s\n", strerror(errno)); goto fail_free_control; } } #elif defined(USE_CLONE2) fprintf(stderr, "clone2() exec not implemented yet\n"); goto fail_free_control; #else ret = FORK(); switch (ret) { case 0: /* child process */ /* fork duplicated the handles, close pipe ends that are used by parent process */ #if !defined(USE_VFORK) /* it looks we cant do this for vfork() */ close(pipe1[1]); close(pipe2[0]); #endif execvp(argv[0], (char **)argv); fprintf(stderr, "exec of UI failed: %s\n", strerror(errno)); exit(1); case -1: fprintf(stderr, "fork() failed to create new process for plugin UI\n"); goto fail_free_control; } #endif FORK_TIME_MEASURE_END(FORK_STR "() time"); //fprintf(stderr, FORK_STR "()-ed child process: %d\n", ret); control_ptr->pid = ret; /* fork duplicated the handles, close pipe ends that are used by the child process */ close(pipe1[0]); close(pipe2[1]); control_ptr->send_pipe = pipe1[1]; /* [1] means writting end */ control_ptr->recv_pipe = pipe2[0]; /* [0] means reading end */ oldflags = fcntl(control_ptr->recv_pipe, F_GETFL); fcntl(control_ptr->recv_pipe, F_SETFL, oldflags | O_NONBLOCK); /* wait a while for child process to confirm it is alive */ //printf("waiting UI start\n"); i = 0; loop: ret = read(control_ptr->recv_pipe, &ch, 1); switch (ret) { case -1: if (errno == EAGAIN) { if (i < WAIT_START_TIMEOUT / WAIT_STEP) { //printf("start wait %d ms ...\n", WAIT_STEP); usleep(WAIT_STEP * 1000); i++; goto loop; } fprintf( stderr, "we have waited for child with pid %d to appear for %.1f seconds and we are giving up\n", (int)control_ptr->pid, (float)((float)WAIT_START_TIMEOUT / 1000)); } else { fprintf(stderr, "read() failed: %s\n", strerror(errno)); } 
break; case 1: if (ch == '\n') { *widget = (LV2UI_Widget)control_ptr; return (LV2UI_Handle)control_ptr; } fprintf(stderr, "read() wrong first char '%c'\n", ch); break; default: fprintf(stderr, "read() returned %d\n", ret); } fprintf(stderr, "force killing misbehaved child %d (start)\n", (int)control_ptr->pid); if (kill(control_ptr->pid, SIGKILL) == -1) { fprintf(stderr, "kill() failed: %s (start)\n", strerror(errno)); } /* wait a while child to exit, we dont like zombie processes */ wait_child(control_ptr->pid); fail_free_control: free(control_ptr); fail: fprintf(stderr, "lv2fil UI launch failed\n"); return NULL; } #define control_ptr ((struct control *)ui) static void cleanup( LV2UI_Handle ui) { //printf("cleanup() called\n"); free(control_ptr); } static void port_event( LV2UI_Handle ui, uint32_t port_index, uint32_t buffer_size, uint32_t format, const void * buffer) { char buf[100]; int len; char * locale; //printf("port_event(%u, %f) called\n", (unsigned int)port_index, *(float *)buffer); locale = strdup(setlocale(LC_NUMERIC, NULL)); setlocale(LC_NUMERIC, "POSIX"); write(control_ptr->send_pipe, "port_value\n", 11); len = sprintf(buf, "%u\n", (unsigned int)port_index); write(control_ptr->send_pipe, buf, len); len = sprintf(buf, "%.10f\n", *(float *)buffer); write(control_ptr->send_pipe, buf, len); fsync(control_ptr->send_pipe); setlocale(LC_NUMERIC, locale); free(locale); } #undef control_ptr static LV2UI_Descriptor descriptors[] = { {UI_URI, instantiate, cleanup, port_event, NULL} }; const LV2UI_Descriptor * lv2ui_descriptor( uint32_t index) { //printf("lv2ui_descriptor(%u) called\n", (unsigned int)index); if (index >= sizeof(descriptors) / sizeof(descriptors[0])) { return NULL; } return descriptors + index; } lv2fil-2.0+20100312.git18130f5a+dfsg0/NEWS0000644000175000017500000000205511346337452016600 0ustar alessioalessio * Remove liblo and pyliblo dependencies - a misconfigured network setup will no longer prevent UI from working (the DSSI problem). 
Ticket #7 * Improve UI launching (vfork) - jack xruns/disconnects should not happen anymore. Ticket #9 * Handle missing runtime dependencies - hosts will not lock-up anymore. Ticket #10 * Consume child zombies. Ticket #8 = Version 2.0 on 2009-06-13 = * Display frequency response curves in the UI * GUI layout improvements * Switch to DSP code to one from FIL-plugins-0.3.0 and change URI to reflect parameter tweaks. The new filter will 'feel' different in particular the interaction between gain and bandwidth. This version is much closer to what you'll find in the best analog (and digital) mixers. But it's still the same filter, only the mapping of parameter values to responses has changed. * Fix restoring of plugin UI state = Version 1.1 on 2009-06-06 = * Two plugins, stereo and mono * Basic GUI = Version 1 on 2009-06-03 = * Initial release, GUI-less mono plugin based on FIL-plugins-0.1.0 lv2fil-2.0+20100312.git18130f5a+dfsg0/waf0000755000175000017500000000757711703032664016614 0ustar alessioalessio#! /usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2005-2008 """ Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. The name of the author may not be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ import os, sys if sys.hexversion<0x203000f: raise "Waf requires Python >= 2.3" if 'PSYCOWAF' in os.environ: try:import psyco;psyco.full() except:pass VERSION="1.3.3" REVISION="6ab4e266aab677a9c822fc0e23df0098" INSTALL=sys.platform=='win32' and 'c:/temp' or '/usr/local' cwd = os.getcwd() join = os.path.join def decode(s): p1 = len(s) s += '!!!!!' w1 = [256**(3-u) for u in xrange(4)] w2 = [(u, 85**(4-u)) for u in xrange(5)] tot = [sum([(ord(s[i+m])-33) * n for (m, n) in w2]) for i in xrange(0, p1, 5)] return ''.join([chr((y/x) & 255) for y in tot for x in w1]) def err(m): print '\033[91mError: %s\033[0m' % m sys.exit(1) def unpack_wafdir(dir): f = open(sys.argv[0],'rb') c = "corrupted waf (%d)" while 1: line = f.readline() if not line: err("run waf-light from a folder containing wafadmin") if line == '#==>\n': txt = f.readline() if not txt: err(c % 1) if f.readline()!='#<==\n': err(c % 2) break if not txt: err(c % 3) try: txt = decode(txt[1:-1].replace('z', '!!!!!')) except: err(c % 4) import shutil, tarfile try: shutil.rmtree(dir) except OSError: pass try: os.makedirs(join(dir, 'wafadmin', 'Tools')) except OSError: err("Cannot unpack waf-local into %s Install waf system-wide or move waf into a writeable directory"%dir) os.chdir(dir) tmp = 't.tbz2' t = open(tmp,'wb') t.write(txt) t.close() t = tarfile.open(tmp) for x in t: t.extract(x) t.close() os.unlink(tmp) os.chdir(cwd) def test(dir): try: os.stat(join(dir, 'wafadmin')); return os.path.abspath(dir) except OSError: pass def 
find_lib(): name = sys.argv[0] base = os.path.dirname(os.path.abspath(name)) #devs use $WAFDIR w=test(os.environ.get('WAFDIR', '')) if w: return w #waf-light if name.endswith('waf-light'): w = test(base) if w: return w err("waf-light requires wafadmin -> export WAFDIR=/folder") dir = "/lib/waf-%s-%s/" % (VERSION, REVISION) for i in [INSTALL,'/usr','/usr/local','/opt']: w = test(i+dir) if w: return w #waf-local s = '.waf-%s-%s' if sys.platform == 'win32': s = s[1:] dir = join(base, s % (VERSION, REVISION)) w = test(dir) if w: return w #unpack unpack_wafdir(dir) return dir wafdir = find_lib() if "-vv" in sys.argv: print "wafdir is %s" % wafdir w = join(wafdir, 'wafadmin') t = join(w, 'Tools') sys.path = [w, t] + sys.path import Params, Scripting Params.g_tooldir = [t] Params.g_cwd_launch = cwd if Params.g_version != VERSION: err('Version mismatch: waf %s <> wafadmin %s (wafdir %s)' % (VERSION, Params.g_version, wafdir)) Scripting.prepare() lv2fil-2.0+20100312.git18130f5a+dfsg0/filter.c0000644000175000017500000001634411346337452017540 0ustar alessioalessio/* -*- Mode: C ; c-basic-offset: 2 -*- */ /* Copyright (C) 2008 Nedko Arnaudov The DSP code is based on ladspa:1970 by Fons Adriaensen This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. 
*/ /* if NDEBUG is defined, assert checks are disabled */ //#define NDEBUG #include #include #include #include #include #include "filter.h" static float exp2ap(float x) { int i; i = (int)(floor(x)); x -= i; // return ldexp(1 + x * (0.66 + 0.34 * x), i); return ldexp(1 + x * (0.6930 + x * (0.2416 + x * (0.0517 + x * 0.0137))), i); } struct param_sect { float f, b, g; float s1, s2, a; float z1, z2; }; inline void param_sect_init( struct param_sect * sect_ptr) { sect_ptr->f = 0.25f; sect_ptr->b = sect_ptr->g = 1.0f; sect_ptr->a = sect_ptr->s1 = sect_ptr->s2 = sect_ptr->z1 = sect_ptr->z2 = 0.0f; } inline void param_sect_proc( struct param_sect * sect_ptr, int k, float * sig, float f, float b, float g) { float s1, s2, d1, d2, a, da, x, y; bool u2 = false; s1 = sect_ptr->s1; s2 = sect_ptr->s2; a = sect_ptr->a; d1 = 0; d2 = 0; da = 0; if (f != sect_ptr->f) { if (f < 0.5f * sect_ptr->f) f = 0.5f * sect_ptr->f; else if (f > 2.0f * sect_ptr->f) f = 2.0f * sect_ptr->f; sect_ptr->f = f; sect_ptr->s1 = -cosf(6.283185f * f); d1 = (sect_ptr->s1 - s1) / k; u2 = true; } if (g != sect_ptr->g) { if (g < 0.5f * sect_ptr->g) g = 0.5f * sect_ptr->g; else if (g > 2.0f * sect_ptr->g) g = 2.0f * sect_ptr->g; sect_ptr->g = g; sect_ptr->a = 0.5f * (g - 1.0f); da = (sect_ptr->a - a) / k; u2 = true; } if (b != sect_ptr->b) { if (b < 0.5f * sect_ptr->b) b = 0.5f * sect_ptr->b; else if (b > 2.0f * sect_ptr->b) b = 2.0f * sect_ptr->b; sect_ptr->b = b; u2 = true; } if (u2) { b *= 7 * f / sqrtf(g); sect_ptr->s2 = (1 - b) / (1 + b); d2 = (sect_ptr->s2 - s2) / k; } while (k--) { s1 += d1; s2 += d2; a += da; x = *sig; y = x - s2 * sect_ptr->z2; *sig++ -= a * (sect_ptr->z2 + s2 * y - x); y -= s1 * sect_ptr->z1; sect_ptr->z2 = sect_ptr->z1 + s1 * y; sect_ptr->z1 = y + 1e-10f; } } struct filter { float sample_rate; const float * global_parameters[GLOBAL_PARAMETERS_COUNT]; unsigned int bands_count; const float ** band_parameters; /* [band_index * BAND_PARAMETERS_COUNT + parameter_index] */ float gain; int 
fade; struct param_sect * sect; /* [band_index] */ }; bool filter_create( float sample_rate, unsigned int bands_count, filter_handle * handle_ptr) { struct filter * filter_ptr; int j; assert(bands_count > 0); filter_ptr = calloc(1, sizeof(struct filter)); if (filter_ptr == NULL) { goto fail; } filter_ptr->band_parameters = calloc(bands_count, sizeof(float *) * BAND_PARAMETERS_COUNT); if (filter_ptr->band_parameters == NULL) { goto free_filter; } filter_ptr->sect = malloc(sizeof(struct param_sect) * bands_count); if (filter_ptr->sect == NULL) { goto free_band_params; } filter_ptr->sample_rate = sample_rate; filter_ptr->bands_count = bands_count; filter_ptr->fade = 0; filter_ptr->gain = 1.0; for (j = 0; j < bands_count; j++) { param_sect_init(filter_ptr->sect + j); } *handle_ptr = (filter_handle)filter_ptr; return true; free_band_params: free(filter_ptr->band_parameters); free_filter: free(filter_ptr); fail: return false; } #define filter_ptr ((struct filter *)handle) void filter_destroy( filter_handle handle) { free(filter_ptr->sect); free(filter_ptr->band_parameters); free(filter_ptr); } void filter_connect_global_parameter( filter_handle handle, unsigned int global_parameter, const float * value_ptr) { assert(global_parameter >= 0); assert(global_parameter < GLOBAL_PARAMETERS_COUNT); filter_ptr->global_parameters[global_parameter] = value_ptr; } void filter_connect_band_parameter( filter_handle handle, unsigned int band_index, unsigned int band_parameter, const float * value_ptr) { assert(band_index >= 0); assert(band_index < filter_ptr->bands_count); assert(band_parameter >= 0); assert(band_parameter < BAND_PARAMETERS_COUNT); filter_ptr->band_parameters[band_index * BAND_PARAMETERS_COUNT + band_parameter] = value_ptr; } void filter_run( filter_handle handle, const float * input_buffer, float * output_buffer, unsigned long samples_count) { int i, j, k; const float * p; float sig[48]; float t, g, d; float fgain; float sfreq[filter_ptr->bands_count]; float 
sband[filter_ptr->bands_count]; float sgain[filter_ptr->bands_count]; float bands_count; bands_count = filter_ptr->bands_count; fgain = exp2ap(0.1661 * *filter_ptr->global_parameters[GLOBAL_PARAMETER_GAIN]); for (j = 0; j < bands_count; j++) { t = *filter_ptr->band_parameters[BAND_PARAMETERS_COUNT * j + BAND_PARAMETER_FREQUENCY] / filter_ptr->sample_rate; if (t < 0.0002) { t = 0.0002; } else if (t > 0.4998) { t = 0.4998; } sfreq[j] = t; sband[j] = *filter_ptr->band_parameters[BAND_PARAMETERS_COUNT * j + BAND_PARAMETER_BANDWIDTH]; if (*filter_ptr->band_parameters[BAND_PARAMETERS_COUNT * j + BAND_PARAMETER_ACTIVE] > 0.0) { sgain[j] = exp2ap(0.1661 * *filter_ptr->band_parameters[BAND_PARAMETERS_COUNT * j + BAND_PARAMETER_GAIN]); } else { sgain[j] = 1.0; } } while (samples_count) { k = (samples_count > 48) ? 32 : samples_count; t = fgain; g = filter_ptr->gain; if (t > 1.25 * g) { t = 1.25 * g; } else if (t < 0.80 * g) { t = 0.80 * g; } filter_ptr->gain = t; d = (t - g) / k; for (i = 0; i < k; i++) { g += d; sig[i] = g * input_buffer[i]; } for (j = 0; j < bands_count; j++) { param_sect_proc(filter_ptr->sect + j, k, sig, sfreq[j], sband[j], sgain[j]); } j = filter_ptr->fade; g = j / 16.0; p = 0; if (*filter_ptr->global_parameters[GLOBAL_PARAMETER_ACTIVE] > 0.0) { if (j == 16) { p = sig; } else { ++j; } } else { if (j == 0) { p = input_buffer; } else { --j; } } filter_ptr->fade = j; if (p) { memcpy(output_buffer, p, k * sizeof(float)); } else { d = (j / 16.0 - g) / k; for (i = 0; i < k; i++) { g += d; output_buffer[i] = g * sig[i] + (1 - g) * input_buffer[i]; } } input_buffer += k; output_buffer += k; samples_count -= k; } } lv2fil-2.0+20100312.git18130f5a+dfsg0/manifest.ttl0000644000175000017500000000060411346337452020432 0ustar alessioalessio@prefix lv2: . @prefix rdfs: . a lv2:Plugin; a lv2:FilterPlugin; lv2:binary ; rdfs:seeAlso . a lv2:Plugin; a lv2:FilterPlugin; lv2:binary ; rdfs:seeAlso . 
lv2fil-2.0+20100312.git18130f5a+dfsg0/lv2filter.h0000644000175000017500000000337111346337452020165 0ustar alessioalessio/* -*- Mode: C ; c-basic-offset: 2 -*- */ /***************************************************************************** * * Copyright (C) 2006,2007,2008,2009 Nedko Arnaudov * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; version 2 of the License * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * *****************************************************************************/ #ifndef LV2FILTER_H__6EC1E456_7DD7_4536_B8D3_F23BE4583A23__INCLUDED #define LV2FILTER_H__6EC1E456_7DD7_4536_B8D3_F23BE4583A23__INCLUDED #define LV2FILTER_MONO_URI "http://nedko.aranaudov.org/soft/filter/2/mono" #define LV2FILTER_STEREO_URI "http://nedko.aranaudov.org/soft/filter/2/stereo" LV2_Handle lv2filter_instantiate( const LV2_Descriptor * descriptor, double sample_rate, const char * bundle_path, const LV2_Feature * const * features); void lv2filter_connect_port( LV2_Handle instance, uint32_t port, void * data_location); void lv2filter_run( LV2_Handle instance, uint32_t samples_count); void lv2filter_cleanup( LV2_Handle instance); const void * lv2filter_extension_data( const char * URI); #endif /* #ifndef LV2FILTER_H__6EC1E456_7DD7_4536_B8D3_F23BE4583A23__INCLUDED */ lv2fil-2.0+20100312.git18130f5a+dfsg0/filter.h0000644000175000017500000000354511346337452017544 0ustar alessioalessio/* -*- Mode: C ; c-basic-offset: 2 -*- */ /* Copyright (C) 2008 Nedko 
Arnaudov This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. */ #ifndef FILTER_H__D5DC5ADF_211A_48F6_93A5_68CD3B73D6C5__INCLUDED #define FILTER_H__D5DC5ADF_211A_48F6_93A5_68CD3B73D6C5__INCLUDED typedef struct {int unused; } * filter_handle; #define GLOBAL_PARAMETER_ACTIVE 0 #define GLOBAL_PARAMETER_GAIN 1 #define GLOBAL_PARAMETERS_COUNT 2 #define BAND_PARAMETER_ACTIVE 0 #define BAND_PARAMETER_FREQUENCY 1 #define BAND_PARAMETER_BANDWIDTH 2 #define BAND_PARAMETER_GAIN 3 #define BAND_PARAMETERS_COUNT 4 bool filter_create( float sample_rate, unsigned int bands_count, filter_handle * handle_ptr); void filter_connect_global_parameter( filter_handle handle, unsigned int global_parameter, const float * value_ptr); void filter_connect_band_parameter( filter_handle handle, unsigned int band_index, unsigned int band_parameter, const float * value_ptr); void filter_run( filter_handle handle, const float * input_buffer, float * output_buffer, unsigned long samples_count); void filter_destroy( filter_handle handle); #endif /* #ifndef FILTER_H__D5DC5ADF_211A_48F6_93A5_68CD3B73D6C5__INCLUDED */